@hyperfrontend/versioning 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/ARCHITECTURE.md +593 -0
- package/CHANGELOG.md +35 -0
- package/FUNDING.md +141 -0
- package/LICENSE.md +21 -0
- package/README.md +195 -0
- package/SECURITY.md +82 -0
- package/changelog/compare/diff.d.ts +128 -0
- package/changelog/compare/diff.d.ts.map +1 -0
- package/changelog/compare/index.cjs.js +628 -0
- package/changelog/compare/index.cjs.js.map +1 -0
- package/changelog/compare/index.d.ts +4 -0
- package/changelog/compare/index.d.ts.map +1 -0
- package/changelog/compare/index.esm.js +612 -0
- package/changelog/compare/index.esm.js.map +1 -0
- package/changelog/compare/is-equal.d.ts +114 -0
- package/changelog/compare/is-equal.d.ts.map +1 -0
- package/changelog/index.cjs.js +6448 -0
- package/changelog/index.cjs.js.map +1 -0
- package/changelog/index.d.ts +6 -0
- package/changelog/index.d.ts.map +1 -0
- package/changelog/index.esm.js +6358 -0
- package/changelog/index.esm.js.map +1 -0
- package/changelog/models/changelog.d.ts +86 -0
- package/changelog/models/changelog.d.ts.map +1 -0
- package/changelog/models/commit-ref.d.ts +51 -0
- package/changelog/models/commit-ref.d.ts.map +1 -0
- package/changelog/models/entry.d.ts +84 -0
- package/changelog/models/entry.d.ts.map +1 -0
- package/changelog/models/index.cjs.js +2043 -0
- package/changelog/models/index.cjs.js.map +1 -0
- package/changelog/models/index.d.ts +11 -0
- package/changelog/models/index.d.ts.map +1 -0
- package/changelog/models/index.esm.js +2026 -0
- package/changelog/models/index.esm.js.map +1 -0
- package/changelog/models/schema.d.ts +68 -0
- package/changelog/models/schema.d.ts.map +1 -0
- package/changelog/models/section.d.ts +25 -0
- package/changelog/models/section.d.ts.map +1 -0
- package/changelog/operations/add-entry.d.ts +56 -0
- package/changelog/operations/add-entry.d.ts.map +1 -0
- package/changelog/operations/add-item.d.ts +18 -0
- package/changelog/operations/add-item.d.ts.map +1 -0
- package/changelog/operations/filter-by-predicate.d.ts +81 -0
- package/changelog/operations/filter-by-predicate.d.ts.map +1 -0
- package/changelog/operations/filter-by-range.d.ts +63 -0
- package/changelog/operations/filter-by-range.d.ts.map +1 -0
- package/changelog/operations/filter-entries.d.ts +9 -0
- package/changelog/operations/filter-entries.d.ts.map +1 -0
- package/changelog/operations/index.cjs.js +2455 -0
- package/changelog/operations/index.cjs.js.map +1 -0
- package/changelog/operations/index.d.ts +15 -0
- package/changelog/operations/index.d.ts.map +1 -0
- package/changelog/operations/index.esm.js +2411 -0
- package/changelog/operations/index.esm.js.map +1 -0
- package/changelog/operations/merge.d.ts +88 -0
- package/changelog/operations/merge.d.ts.map +1 -0
- package/changelog/operations/remove-entry.d.ts +45 -0
- package/changelog/operations/remove-entry.d.ts.map +1 -0
- package/changelog/operations/remove-section.d.ts +50 -0
- package/changelog/operations/remove-section.d.ts.map +1 -0
- package/changelog/operations/transform.d.ts +143 -0
- package/changelog/operations/transform.d.ts.map +1 -0
- package/changelog/parse/index.cjs.js +1282 -0
- package/changelog/parse/index.cjs.js.map +1 -0
- package/changelog/parse/index.d.ts +5 -0
- package/changelog/parse/index.d.ts.map +1 -0
- package/changelog/parse/index.esm.js +1275 -0
- package/changelog/parse/index.esm.js.map +1 -0
- package/changelog/parse/line.d.ts +48 -0
- package/changelog/parse/line.d.ts.map +1 -0
- package/changelog/parse/parser.d.ts +16 -0
- package/changelog/parse/parser.d.ts.map +1 -0
- package/changelog/parse/tokenizer.d.ts +49 -0
- package/changelog/parse/tokenizer.d.ts.map +1 -0
- package/changelog/serialize/index.cjs.js +574 -0
- package/changelog/serialize/index.cjs.js.map +1 -0
- package/changelog/serialize/index.d.ts +6 -0
- package/changelog/serialize/index.d.ts.map +1 -0
- package/changelog/serialize/index.esm.js +564 -0
- package/changelog/serialize/index.esm.js.map +1 -0
- package/changelog/serialize/templates.d.ts +81 -0
- package/changelog/serialize/templates.d.ts.map +1 -0
- package/changelog/serialize/to-json.d.ts +57 -0
- package/changelog/serialize/to-json.d.ts.map +1 -0
- package/changelog/serialize/to-string.d.ts +30 -0
- package/changelog/serialize/to-string.d.ts.map +1 -0
- package/commits/index.cjs.js +648 -0
- package/commits/index.cjs.js.map +1 -0
- package/commits/index.d.ts +3 -0
- package/commits/index.d.ts.map +1 -0
- package/commits/index.esm.js +629 -0
- package/commits/index.esm.js.map +1 -0
- package/commits/models/breaking.d.ts +39 -0
- package/commits/models/breaking.d.ts.map +1 -0
- package/commits/models/commit-type.d.ts +32 -0
- package/commits/models/commit-type.d.ts.map +1 -0
- package/commits/models/conventional.d.ts +49 -0
- package/commits/models/conventional.d.ts.map +1 -0
- package/commits/models/index.cjs.js +207 -0
- package/commits/models/index.cjs.js.map +1 -0
- package/commits/models/index.d.ts +7 -0
- package/commits/models/index.d.ts.map +1 -0
- package/commits/models/index.esm.js +193 -0
- package/commits/models/index.esm.js.map +1 -0
- package/commits/parse/body.d.ts +18 -0
- package/commits/parse/body.d.ts.map +1 -0
- package/commits/parse/footer.d.ts +16 -0
- package/commits/parse/footer.d.ts.map +1 -0
- package/commits/parse/header.d.ts +15 -0
- package/commits/parse/header.d.ts.map +1 -0
- package/commits/parse/index.cjs.js +505 -0
- package/commits/parse/index.cjs.js.map +1 -0
- package/commits/parse/index.d.ts +5 -0
- package/commits/parse/index.d.ts.map +1 -0
- package/commits/parse/index.esm.js +499 -0
- package/commits/parse/index.esm.js.map +1 -0
- package/commits/parse/message.d.ts +17 -0
- package/commits/parse/message.d.ts.map +1 -0
- package/commits/utils/replace-char.d.ts +19 -0
- package/commits/utils/replace-char.d.ts.map +1 -0
- package/flow/executor/execute.d.ts +72 -0
- package/flow/executor/execute.d.ts.map +1 -0
- package/flow/executor/index.cjs.js +4402 -0
- package/flow/executor/index.cjs.js.map +1 -0
- package/flow/executor/index.d.ts +3 -0
- package/flow/executor/index.d.ts.map +1 -0
- package/flow/executor/index.esm.js +4398 -0
- package/flow/executor/index.esm.js.map +1 -0
- package/flow/factory.d.ts +58 -0
- package/flow/factory.d.ts.map +1 -0
- package/flow/index.cjs.js +8506 -0
- package/flow/index.cjs.js.map +1 -0
- package/flow/index.d.ts +7 -0
- package/flow/index.d.ts.map +1 -0
- package/flow/index.esm.js +8451 -0
- package/flow/index.esm.js.map +1 -0
- package/flow/models/flow.d.ts +130 -0
- package/flow/models/flow.d.ts.map +1 -0
- package/flow/models/index.cjs.js +285 -0
- package/flow/models/index.cjs.js.map +1 -0
- package/flow/models/index.d.ts +7 -0
- package/flow/models/index.d.ts.map +1 -0
- package/flow/models/index.esm.js +268 -0
- package/flow/models/index.esm.js.map +1 -0
- package/flow/models/step.d.ts +108 -0
- package/flow/models/step.d.ts.map +1 -0
- package/flow/models/types.d.ts +150 -0
- package/flow/models/types.d.ts.map +1 -0
- package/flow/presets/conventional.d.ts +59 -0
- package/flow/presets/conventional.d.ts.map +1 -0
- package/flow/presets/independent.d.ts +61 -0
- package/flow/presets/independent.d.ts.map +1 -0
- package/flow/presets/index.cjs.js +3903 -0
- package/flow/presets/index.cjs.js.map +1 -0
- package/flow/presets/index.d.ts +4 -0
- package/flow/presets/index.d.ts.map +1 -0
- package/flow/presets/index.esm.js +3889 -0
- package/flow/presets/index.esm.js.map +1 -0
- package/flow/presets/synced.d.ts +65 -0
- package/flow/presets/synced.d.ts.map +1 -0
- package/flow/steps/analyze-commits.d.ts +19 -0
- package/flow/steps/analyze-commits.d.ts.map +1 -0
- package/flow/steps/calculate-bump.d.ts +27 -0
- package/flow/steps/calculate-bump.d.ts.map +1 -0
- package/flow/steps/create-commit.d.ts +16 -0
- package/flow/steps/create-commit.d.ts.map +1 -0
- package/flow/steps/create-tag.d.ts +22 -0
- package/flow/steps/create-tag.d.ts.map +1 -0
- package/flow/steps/fetch-registry.d.ts +19 -0
- package/flow/steps/fetch-registry.d.ts.map +1 -0
- package/flow/steps/generate-changelog.d.ts +25 -0
- package/flow/steps/generate-changelog.d.ts.map +1 -0
- package/flow/steps/index.cjs.js +3523 -0
- package/flow/steps/index.cjs.js.map +1 -0
- package/flow/steps/index.d.ts +8 -0
- package/flow/steps/index.d.ts.map +1 -0
- package/flow/steps/index.esm.js +3504 -0
- package/flow/steps/index.esm.js.map +1 -0
- package/flow/steps/update-packages.d.ts +25 -0
- package/flow/steps/update-packages.d.ts.map +1 -0
- package/flow/utils/interpolate.d.ts +11 -0
- package/flow/utils/interpolate.d.ts.map +1 -0
- package/git/factory.d.ts +233 -0
- package/git/factory.d.ts.map +1 -0
- package/git/index.cjs.js +2863 -0
- package/git/index.cjs.js.map +1 -0
- package/git/index.d.ts +5 -0
- package/git/index.d.ts.map +1 -0
- package/git/index.esm.js +2785 -0
- package/git/index.esm.js.map +1 -0
- package/git/models/commit.d.ts +129 -0
- package/git/models/commit.d.ts.map +1 -0
- package/git/models/index.cjs.js +755 -0
- package/git/models/index.cjs.js.map +1 -0
- package/git/models/index.d.ts +7 -0
- package/git/models/index.d.ts.map +1 -0
- package/git/models/index.esm.js +729 -0
- package/git/models/index.esm.js.map +1 -0
- package/git/models/ref.d.ts +120 -0
- package/git/models/ref.d.ts.map +1 -0
- package/git/models/tag.d.ts +141 -0
- package/git/models/tag.d.ts.map +1 -0
- package/git/operations/commit.d.ts +97 -0
- package/git/operations/commit.d.ts.map +1 -0
- package/git/operations/head-info.d.ts +29 -0
- package/git/operations/head-info.d.ts.map +1 -0
- package/git/operations/index.cjs.js +1954 -0
- package/git/operations/index.cjs.js.map +1 -0
- package/git/operations/index.d.ts +14 -0
- package/git/operations/index.d.ts.map +1 -0
- package/git/operations/index.esm.js +1903 -0
- package/git/operations/index.esm.js.map +1 -0
- package/git/operations/log.d.ts +104 -0
- package/git/operations/log.d.ts.map +1 -0
- package/git/operations/manage-tags.d.ts +60 -0
- package/git/operations/manage-tags.d.ts.map +1 -0
- package/git/operations/query-tags.d.ts +88 -0
- package/git/operations/query-tags.d.ts.map +1 -0
- package/git/operations/stage.d.ts +66 -0
- package/git/operations/stage.d.ts.map +1 -0
- package/git/operations/status.d.ts +173 -0
- package/git/operations/status.d.ts.map +1 -0
- package/index.cjs.js +16761 -0
- package/index.cjs.js.map +1 -0
- package/index.d.ts +102 -0
- package/index.d.ts.map +1 -0
- package/index.esm.js +16427 -0
- package/index.esm.js.map +1 -0
- package/package.json +200 -0
- package/registry/factory.d.ts +18 -0
- package/registry/factory.d.ts.map +1 -0
- package/registry/index.cjs.js +543 -0
- package/registry/index.cjs.js.map +1 -0
- package/registry/index.d.ts +5 -0
- package/registry/index.d.ts.map +1 -0
- package/registry/index.esm.js +535 -0
- package/registry/index.esm.js.map +1 -0
- package/registry/models/index.cjs.js +69 -0
- package/registry/models/index.cjs.js.map +1 -0
- package/registry/models/index.d.ts +6 -0
- package/registry/models/index.d.ts.map +1 -0
- package/registry/models/index.esm.js +66 -0
- package/registry/models/index.esm.js.map +1 -0
- package/registry/models/package-info.d.ts +55 -0
- package/registry/models/package-info.d.ts.map +1 -0
- package/registry/models/registry.d.ts +62 -0
- package/registry/models/registry.d.ts.map +1 -0
- package/registry/models/version-info.d.ts +67 -0
- package/registry/models/version-info.d.ts.map +1 -0
- package/registry/npm/cache.d.ts +50 -0
- package/registry/npm/cache.d.ts.map +1 -0
- package/registry/npm/client.d.ts +30 -0
- package/registry/npm/client.d.ts.map +1 -0
- package/registry/npm/index.cjs.js +456 -0
- package/registry/npm/index.cjs.js.map +1 -0
- package/registry/npm/index.d.ts +4 -0
- package/registry/npm/index.d.ts.map +1 -0
- package/registry/npm/index.esm.js +451 -0
- package/registry/npm/index.esm.js.map +1 -0
- package/semver/compare/compare.d.ts +100 -0
- package/semver/compare/compare.d.ts.map +1 -0
- package/semver/compare/index.cjs.js +386 -0
- package/semver/compare/index.cjs.js.map +1 -0
- package/semver/compare/index.d.ts +3 -0
- package/semver/compare/index.d.ts.map +1 -0
- package/semver/compare/index.esm.js +370 -0
- package/semver/compare/index.esm.js.map +1 -0
- package/semver/compare/sort.d.ts +36 -0
- package/semver/compare/sort.d.ts.map +1 -0
- package/semver/format/index.cjs.js +58 -0
- package/semver/format/index.cjs.js.map +1 -0
- package/semver/format/index.d.ts +2 -0
- package/semver/format/index.d.ts.map +1 -0
- package/semver/format/index.esm.js +53 -0
- package/semver/format/index.esm.js.map +1 -0
- package/semver/format/to-string.d.ts +31 -0
- package/semver/format/to-string.d.ts.map +1 -0
- package/semver/increment/bump.d.ts +37 -0
- package/semver/increment/bump.d.ts.map +1 -0
- package/semver/increment/index.cjs.js +223 -0
- package/semver/increment/index.cjs.js.map +1 -0
- package/semver/increment/index.d.ts +2 -0
- package/semver/increment/index.d.ts.map +1 -0
- package/semver/increment/index.esm.js +219 -0
- package/semver/increment/index.esm.js.map +1 -0
- package/semver/index.cjs.js +1499 -0
- package/semver/index.cjs.js.map +1 -0
- package/semver/index.d.ts +6 -0
- package/semver/index.d.ts.map +1 -0
- package/semver/index.esm.js +1458 -0
- package/semver/index.esm.js.map +1 -0
- package/semver/models/index.cjs.js +153 -0
- package/semver/models/index.cjs.js.map +1 -0
- package/semver/models/index.d.ts +5 -0
- package/semver/models/index.d.ts.map +1 -0
- package/semver/models/index.esm.js +139 -0
- package/semver/models/index.esm.js.map +1 -0
- package/semver/models/range.d.ts +83 -0
- package/semver/models/range.d.ts.map +1 -0
- package/semver/models/version.d.ts +78 -0
- package/semver/models/version.d.ts.map +1 -0
- package/semver/parse/index.cjs.js +799 -0
- package/semver/parse/index.cjs.js.map +1 -0
- package/semver/parse/index.d.ts +5 -0
- package/semver/parse/index.d.ts.map +1 -0
- package/semver/parse/index.esm.js +793 -0
- package/semver/parse/index.esm.js.map +1 -0
- package/semver/parse/range.d.ts +38 -0
- package/semver/parse/range.d.ts.map +1 -0
- package/semver/parse/version.d.ts +49 -0
- package/semver/parse/version.d.ts.map +1 -0
- package/workspace/discovery/changelog-path.d.ts +21 -0
- package/workspace/discovery/changelog-path.d.ts.map +1 -0
- package/workspace/discovery/dependencies.d.ts +145 -0
- package/workspace/discovery/dependencies.d.ts.map +1 -0
- package/workspace/discovery/discover-changelogs.d.ts +76 -0
- package/workspace/discovery/discover-changelogs.d.ts.map +1 -0
- package/workspace/discovery/index.cjs.js +2300 -0
- package/workspace/discovery/index.cjs.js.map +1 -0
- package/workspace/discovery/index.d.ts +13 -0
- package/workspace/discovery/index.d.ts.map +1 -0
- package/workspace/discovery/index.esm.js +2283 -0
- package/workspace/discovery/index.esm.js.map +1 -0
- package/workspace/discovery/packages.d.ts +83 -0
- package/workspace/discovery/packages.d.ts.map +1 -0
- package/workspace/index.cjs.js +4445 -0
- package/workspace/index.cjs.js.map +1 -0
- package/workspace/index.d.ts +52 -0
- package/workspace/index.d.ts.map +1 -0
- package/workspace/index.esm.js +4394 -0
- package/workspace/index.esm.js.map +1 -0
- package/workspace/models/index.cjs.js +284 -0
- package/workspace/models/index.cjs.js.map +1 -0
- package/workspace/models/index.d.ts +10 -0
- package/workspace/models/index.d.ts.map +1 -0
- package/workspace/models/index.esm.js +261 -0
- package/workspace/models/index.esm.js.map +1 -0
- package/workspace/models/project.d.ts +118 -0
- package/workspace/models/project.d.ts.map +1 -0
- package/workspace/models/workspace.d.ts +139 -0
- package/workspace/models/workspace.d.ts.map +1 -0
- package/workspace/operations/batch-update.d.ts +99 -0
- package/workspace/operations/batch-update.d.ts.map +1 -0
- package/workspace/operations/cascade-bump.d.ts +125 -0
- package/workspace/operations/cascade-bump.d.ts.map +1 -0
- package/workspace/operations/index.cjs.js +2675 -0
- package/workspace/operations/index.cjs.js.map +1 -0
- package/workspace/operations/index.d.ts +12 -0
- package/workspace/operations/index.d.ts.map +1 -0
- package/workspace/operations/index.esm.js +2663 -0
- package/workspace/operations/index.esm.js.map +1 -0
- package/workspace/operations/validate.d.ts +85 -0
- package/workspace/operations/validate.d.ts.map +1 -0
|
@@ -0,0 +1,4394 @@
|
|
|
1
|
+
import { dirname, join as join$1, resolve, parse as parse$1 } from 'node:path';
|
|
2
|
+
import 'node:util';
|
|
3
|
+
import { existsSync, readFileSync, writeFileSync, mkdirSync, statSync, lstatSync, readdirSync } from 'node:fs';
|
|
4
|
+
import 'node:os';
|
|
5
|
+
|
|
6
|
+
/**
 * Safe copy of the Map built-in, exposed through a factory function.
 *
 * Constructors cannot be safely captured via Object.assign, so this module
 * exposes a factory that relies on Reflect.construct instead, with the
 * references grabbed at module initialization time — before any prototype
 * pollution can take place. Import only what you need for tree-shaking.
 *
 * @module @hyperfrontend/immutable-api-utils/built-in-copy/map
 */
// Grab the globals once, while the module is loading
const _Map = globalThis.Map;
const _Reflect$2 = globalThis.Reflect;
/**
 * (Safe copy) Builds a Map through the captured constructor.
 * Prefer this over `new Map()`.
 *
 * @param iterable - Optional iterable of key-value pairs.
 * @returns A new Map instance.
 */
const createMap = (iterable) => {
    const ctorArgs = iterable ? [iterable] : [];
    return _Reflect$2.construct(_Map, ctorArgs);
};
|
|
28
|
+
|
|
29
|
+
/**
 * Safe copies of Console built-in methods.
 *
 * The console object and its methods are captured at module initialization
 * time to guard against prototype pollution. Import only what you need for
 * tree-shaking.
 *
 * @module @hyperfrontend/immutable-api-utils/built-in-copy/console
 */
// Grab the console object once, while the module is loading
const _console = globalThis.console;
// Binds one console method to the captured console instance.
const bindConsoleMethod = (name) => _console[name].bind(_console);
/**
 * (Safe copy) Outputs a message to the console.
 */
const log = bindConsoleMethod('log');
/**
 * (Safe copy) Outputs a warning message to the console.
 */
const warn = bindConsoleMethod('warn');
/**
 * (Safe copy) Outputs an error message to the console.
 */
const error = bindConsoleMethod('error');
/**
 * (Safe copy) Outputs an informational message to the console.
 */
const info = bindConsoleMethod('info');
/**
 * (Safe copy) Outputs a debug message to the console.
 */
const debug = bindConsoleMethod('debug');
// The bundler dropped the bindings below as unused, but kept the
// (side-effect-free) bind calls; they are reproduced here for parity:
// trace, dir, table, assert, clear, count, countReset, group,
// groupCollapsed, groupEnd, time, timeEnd, timeLog.
bindConsoleMethod('trace');
bindConsoleMethod('dir');
bindConsoleMethod('table');
bindConsoleMethod('assert');
bindConsoleMethod('clear');
bindConsoleMethod('count');
bindConsoleMethod('countReset');
bindConsoleMethod('group');
bindConsoleMethod('groupCollapsed');
bindConsoleMethod('groupEnd');
bindConsoleMethod('time');
bindConsoleMethod('timeEnd');
bindConsoleMethod('timeLog');
|
|
111
|
+
|
|
112
|
+
/**
 * Safe copies of Array built-in static methods.
 *
 * References are captured at module initialization time to guard against
 * prototype pollution. Import only what you need for tree-shaking.
 *
 * @module @hyperfrontend/immutable-api-utils/built-in-copy/array
 */
// Grab the global once, while the module is loading
const _Array = globalThis.Array;
/**
 * (Safe copy) Determines whether the passed value is an Array.
 */
const { isArray } = _Array;
|
|
126
|
+
|
|
127
|
+
/**
 * Safe copies of JSON built-in methods.
 *
 * References are captured at module initialization time to guard against
 * prototype pollution. Import only what you need for tree-shaking.
 *
 * @module @hyperfrontend/immutable-api-utils/built-in-copy/json
 */
// Grab the global once, while the module is loading
const _JSON = globalThis.JSON;
/**
 * (Safe copy) parse: converts a JSON string into a value.
 * (Safe copy) stringify: converts a value into a JSON string.
 */
const { parse, stringify } = _JSON;
|
|
145
|
+
|
|
146
|
+
/**
 * Safe copies of Object built-in methods.
 *
 * References are captured at module initialization time to guard against
 * prototype pollution. Import only what you need for tree-shaking.
 *
 * @module @hyperfrontend/immutable-api-utils/built-in-copy/object
 */
// Grab the global once, while the module is loading
const _Object = globalThis.Object;
/**
 * Safe copies of the Object statics used by this package:
 * - freeze: locks property attributes/values and blocks new properties.
 * - keys: enumerable own string property names.
 * - entries: enumerable own [key, value] pairs.
 * - values: enumerable own property values.
 * - defineProperties: adds/modifies properties with explicit descriptors.
 */
const { freeze, keys, entries, values, defineProperties } = _Object;
|
|
177
|
+
|
|
178
|
+
/**
 * Safe copy of the Set built-in, exposed through a factory function.
 *
 * Constructors cannot be safely captured via Object.assign, so this module
 * exposes a factory that relies on Reflect.construct instead, with the
 * references grabbed at module initialization time — before any prototype
 * pollution can take place. Import only what you need for tree-shaking.
 *
 * @module @hyperfrontend/immutable-api-utils/built-in-copy/set
 */
// Grab the globals once, while the module is loading
const _Set = globalThis.Set;
const _Reflect$1 = globalThis.Reflect;
/**
 * (Safe copy) Builds a Set through the captured constructor.
 * Prefer this over `new Set()`.
 *
 * @param iterable - Optional iterable of values.
 * @returns A new Set instance.
 */
const createSet = (iterable) => {
    const ctorArgs = iterable ? [iterable] : [];
    return _Reflect$1.construct(_Set, ctorArgs);
};
|
|
200
|
+
|
|
201
|
+
// Classes registered via `registerClass`; getType reports instances of
// these by class name.
const registeredClasses = [];

/**
 * Returns the data type of the target.
 * Uses the native `typeof` operator, but distinguishes `null`, `array`,
 * and `object`, and reports instances of registered classes (see
 * `registerClass`) by their class name.
 *
 * @param target - The target to get the data type of.
 * @returns The data type of the target.
 */
const getType = (target) => {
    // typeof null is 'object'; report it explicitly.
    if (target === null)
        return 'null';
    const nativeKind = typeof target;
    if (nativeKind !== 'object')
        return nativeKind;
    if (isArray(target))
        return 'array';
    // First registered class wins, mirroring registration order.
    for (const registeredClass of registeredClasses) {
        if (target instanceof registeredClass)
            return registeredClass.name;
    }
    return nativeKind;
};
|
|
225
|
+
|
|
226
|
+
/**
 * Safe copy of the Error built-in, exposed through a factory function.
 *
 * Constructors cannot be safely captured via Object.assign, so this module
 * exposes a factory that relies on Reflect.construct instead, with the
 * references grabbed at module initialization time — before any prototype
 * pollution can take place. Import only what you need for tree-shaking.
 *
 * @module @hyperfrontend/immutable-api-utils/built-in-copy/error
 */
// Grab the globals once, while the module is loading
const _Error = globalThis.Error;
const _Reflect = globalThis.Reflect;
/**
 * (Safe copy) Builds an Error through the captured constructor.
 * Prefer this over `new Error()`.
 *
 * @param message - Optional error message.
 * @param options - Optional error options (e.g. `cause`).
 * @returns A new Error instance.
 */
const createError = (message, options) => {
    return _Reflect.construct(_Error, [message, options]);
};
|
|
249
|
+
|
|
250
|
+
/**
 * Safe copies of Math built-in methods.
 *
 * References are captured at module initialization time to guard against
 * prototype pollution. Import only what you need for tree-shaking.
 *
 * @module @hyperfrontend/immutable-api-utils/built-in-copy/math
 */
// Grab the global once, while the module is loading
const _Math = globalThis.Math;
// ============================================================================
// Min/Max
// ============================================================================
/**
 * (Safe copy) Returns the larger of zero or more numbers.
 */
const { max } = _Math;
|
|
267
|
+
|
|
268
|
+
/* eslint-disable @typescript-eslint/no-explicit-any */
/**
 * Wraps a function so that it only runs while a condition holds.
 * Each invocation re-evaluates `conditionFunc`; when it returns false the
 * call is skipped and the wrapper yields `undefined`.
 *
 * @param func - The function to be conditionally executed.
 * @param conditionFunc - Returns a boolean deciding whether `func` runs.
 * @returns A wrapped version of `func` that executes conditionally.
 */
function createConditionalExecutionFunction(func, conditionFunc) {
    return function (...args) {
        // Skip the call entirely when the gate is closed.
        if (!conditionFunc()) {
            return undefined;
        }
        return func(...args);
    };
}
|
|
283
|
+
|
|
284
|
+
/* eslint-disable @typescript-eslint/no-explicit-any */
/**
 * Wraps a void function so that any error it throws is silently ignored.
 * Intended only for fire-and-forget functions with no return value;
 * exceptions are swallowed without logging or rethrowing.
 *
 * @param func - The void function to be wrapped.
 * @returns A wrapped version of the input function that ignores errors.
 */
function createErrorIgnoringFunction(func) {
    return function (...args) {
        try {
            func(...args);
        }
        catch {
            // Swallowed on purpose: the wrapper is strictly best-effort.
        }
    };
}
|
|
303
|
+
|
|
304
|
+
/* eslint-disable @typescript-eslint/no-unused-vars */
/**
 * A no-operation function: accepts anything, does nothing, returns
 * undefined. Typed as permissively as possible without resorting to the
 * `Function` keyword.
 *
 * @param args - Any arguments passed to the function (ignored)
 */
const noop = (...args) => undefined;
|
|
314
|
+
|
|
315
|
+
// Every accepted log level, 'none' disabling output entirely.
const logLevels = ['none', 'error', 'warn', 'log', 'info', 'debug'];
// Numeric severity used for threshold comparisons (higher = more severe).
// 'none' is deliberately absent: it never participates in comparisons.
const priority = {
    error: 4,
    warn: 3,
    log: 2,
    info: 1,
    debug: 0,
};
/**
 * Validates whether a given string is a valid log level.
 *
 * @param level - The log level to validate
 * @returns True if the level is valid, false otherwise
 */
function isValidLogLevel(level) {
    return logLevels.some((knownLevel) => knownLevel === level);
}
|
|
332
|
+
/**
 * Creates a log level configuration manager for controlling logging behavior.
 * Provides methods to get, set, and evaluate log levels based on priority.
 *
 * @param level - The initial log level (defaults to 'error')
 * @returns A frozen configuration object with log level management methods
 * @throws {Error} When the provided level is not a valid log level
 */
function createLogLevelConfig(level = 'error') {
    if (!isValidLogLevel(level)) {
        // Fixed: the previous message claimed the default level was "valid",
        // yet this branch runs precisely when the level is INVALID. Also
        // include the offending value, matching setLogLevel's message style.
        throw createError(`Cannot create log level configuration with invalid default level '${level}'. Expected levels are ${logLevels}.`);
    }
    // Mutable holder so the frozen API object can still change the level.
    const state = { level };
    /** Returns the currently configured log level. */
    const getLogLevel = () => state.level;
    /**
     * Replaces the current log level.
     * @throws {Error} When the new level is not a valid log level.
     */
    const setLogLevel = (level) => {
        if (!isValidLogLevel(level)) {
            throw createError(`Cannot set value '${level}' level. Expected levels are ${logLevels}.`);
        }
        state.level = level;
    };
    /**
     * Decides whether a message at `level` should be emitted under the
     * current configuration. 'none' (on either side) and unknown levels
     * never log.
     */
    const shouldLog = (level) => {
        if (state.level === 'none' || level === 'none' || !isValidLogLevel(level)) {
            return false;
        }
        // More severe messages (error=4 … debug=0) pass the threshold.
        return priority[level] >= priority[state.level];
    };
    return freeze({
        getLogLevel,
        setLogLevel,
        shouldLog,
    });
}
|
|
364
|
+
|
|
365
|
+
/**
 * Creates a logger instance with configurable log level filtering.
 * Each provided log function is wrapped so it only runs when the current
 * log level permits it.
 *
 * @param error - Function to handle error-level logs (required)
 * @param warn - Function to handle warning-level logs (optional, defaults to noop)
 * @param log - Function to handle standard logs (optional, defaults to noop)
 * @param info - Function to handle info-level logs (optional, defaults to noop)
 * @param debug - Function to handle debug-level logs (optional, defaults to noop)
 * @returns A frozen logger object with log methods and level control
 * @throws {Error} When any provided log function is invalid
 */
function createLogger(error, warn = noop, log = noop, info = noop, debug = noop) {
    // Validate in parameter-declaration order, same as the per-argument checks.
    const labelled = [
        ['error', error],
        ['warn', warn],
        ['log', log],
        ['info', info],
        ['debug', debug],
    ];
    for (const [label, fn] of labelled) {
        if (notValidLogFn(fn)) {
            throw createError(notFnMsg(label));
        }
    }
    const { setLogLevel, getLogLevel, shouldLog } = createLogLevelConfig();
    const wrapLogFn = (fn, level) => {
        // noop needs no wrapping: it already does nothing.
        if (fn === noop) {
            return fn;
        }
        const condition = () => shouldLog(level);
        return createConditionalExecutionFunction(createErrorIgnoringFunction(fn), condition);
    };
    return freeze({
        error: wrapLogFn(error, 'error'),
        warn: wrapLogFn(warn, 'warn'),
        log: wrapLogFn(log, 'log'),
        info: wrapLogFn(info, 'info'),
        debug: wrapLogFn(debug, 'debug'),
        setLogLevel,
        getLogLevel,
    });
}
|
|
410
|
+
/**
 * Validates whether a given value is a valid log function.
 * A value is valid when it is a function, or is the shared noop sentinel.
 *
 * @param fn - The value to validate
 * @returns True if the value is NOT valid (i.e. not a function), false if valid
 */
function notValidLogFn(fn) {
    const isAcceptable = getType(fn) === 'function' || fn === noop;
    return !isAcceptable;
}
|
|
419
|
+
/**
 * Generates an error message for invalid log function parameters.
 *
 * @param label - The name of the log function that failed validation
 * @returns A formatted error message string
 */
function notFnMsg(label) {
    const parts = ['Cannot create a logger when', label, 'is not a function'];
    return parts.join(' ');
}
|
|
428
|
+
|
|
429
|
+
// NOTE(review): the return value is discarded — this looks like bundler output
// where the binding for the default logger was removed as unused; confirm the
// bare call is still required (constructing the logger has no visible side
// effects here beyond the registry-free createLogger path).
createLogger(error, warn, log, info, debug);
|
|
430
|
+
|
|
431
|
+
/**
 * Global log level registry.
 * Tracks all created scoped loggers to allow global log level changes.
 * NOTE(review): `createSet` is defined elsewhere in this file — presumably a
 * thin wrapper around `new Set()`; confirm before relying on Set semantics.
 */
const loggerRegistry = createSet();
/** Redacted placeholder substituted for sensitive metadata values */
const REDACTED = '[REDACTED]';
/**
 * Patterns that indicate a sensitive key name.
 * Keys containing these patterns will have their values sanitized.
 * All patterns are case-insensitive substring matches, so broad terms such
 * as /auth/i also match keys like "author" — redaction errs on the side of
 * matching too much rather than too little.
 */
const SENSITIVE_KEY_PATTERNS = [
    /token/i,
    /key/i,
    /password/i,
    /secret/i,
    /credential/i,
    /auth/i,
    /bearer/i,
    /api[_-]?key/i,
    /private/i,
    /passphrase/i,
];
|
|
454
|
+
/**
 * Checks if a key name indicates sensitive data.
 *
 * @param key - Key name to check
 * @returns True if the key matches any sensitive-key pattern
 */
function isSensitiveKey(key) {
    for (const pattern of SENSITIVE_KEY_PATTERNS) {
        if (pattern.test(key)) {
            return true;
        }
    }
    return false;
}
|
|
463
|
+
/**
 * Sanitizes an object by replacing sensitive values with REDACTED.
 * Recursively processes nested objects and arrays; primitives, null and
 * undefined pass through unchanged.
 *
 * Circular references are now handled: any object revisited on the current
 * recursion path is replaced with the REDACTED placeholder (the previous
 * implementation recursed without bound on cycles).
 *
 * @param obj - Object to sanitize
 * @returns New object with sensitive values redacted
 */
function sanitize(obj) {
    // `seen` tracks the current path only (delete on unwind), so shared
    // non-cyclic sub-objects are still sanitized normally.
    const walk = (value, seen) => {
        if (value === null || value === undefined) {
            return value;
        }
        if (isArray(value)) {
            if (seen.has(value)) {
                return REDACTED;
            }
            seen.add(value);
            const mapped = value.map((item) => walk(item, seen));
            seen.delete(value);
            return mapped;
        }
        if (typeof value === 'object') {
            if (seen.has(value)) {
                return REDACTED;
            }
            seen.add(value);
            const result = {};
            for (const [key, entryValue] of entries(value)) {
                if (isSensitiveKey(key)) {
                    result[key] = REDACTED;
                }
                else if (typeof entryValue === 'object' && entryValue !== null) {
                    result[key] = walk(entryValue, seen);
                }
                else {
                    result[key] = entryValue;
                }
            }
            seen.delete(value);
            return result;
        }
        return value;
    };
    return walk(obj, new Set());
}
|
|
494
|
+
/**
 * Formats a log message with optional metadata.
 *
 * @param namespace - Logger namespace prefix
 * @param message - Log message
 * @param meta - Optional metadata object
 * @returns Formatted log string, `[namespace] message [json-meta]`
 */
function formatMessage(namespace, message, meta) {
    const prefix = `[${namespace}]`;
    const hasMeta = Boolean(meta) && keys(meta).length > 0;
    if (!hasMeta) {
        return `${prefix} ${message}`;
    }
    // Redact sensitive values before serialising the metadata.
    return `${prefix} ${message} ${stringify(sanitize(meta))}`;
}
|
|
510
|
+
/**
 * Creates a scoped logger with namespace prefix and optional secret sanitization.
 * All log messages will be prefixed with [namespace] and sensitive metadata
 * values will be automatically redacted.
 *
 * @param namespace - Logger namespace (e.g., 'project-scope', 'analyze')
 * @param options - Logger configuration options (`level`, `sanitizeSecrets`)
 * @returns A configured, frozen scoped logger instance
 *
 * @example
 * ```typescript
 * const logger = createScopedLogger('project-scope')
 * logger.setLogLevel('debug')
 *
 * // Basic logging
 * logger.info('Starting analysis', { path: './project' })
 *
 * // Sensitive data is automatically redacted
 * logger.debug('Config loaded', { apiKey: 'secret123' })
 * // Output: [project-scope] Config loaded {"apiKey":"[REDACTED]"}
 * ```
 */
function createScopedLogger(namespace, options = {}) {
    const { level = 'error', sanitizeSecrets = true } = options;
    // Create wrapper functions that add namespace prefix and sanitization.
    // NOTE(review): when sanitizeSecrets is true the metadata is sanitized
    // here AND again inside formatMessage — redundant but harmless, since
    // re-sanitizing already-redacted data yields the same result.
    const createLogFn = (baseFn) => (message, meta) => {
        const processedMeta = sanitizeSecrets && meta ? sanitize(meta) : meta;
        baseFn(formatMessage(namespace, message, processedMeta));
    };
    // Create base logger with wrapped functions
    const baseLogger = createLogger(createLogFn(error), createLogFn(warn), createLogFn(log), createLogFn(info), createLogFn(debug));
    // Set initial log level (use global override if set)
    // NOTE(review): no global-override lookup is visible at this call site
    // despite the comment above — confirm whether an override exists elsewhere.
    baseLogger.setLogLevel(level);
    // The (message, meta) pairs below flow through baseLogger's level filter
    // into the createLogFn closures, which consume both arguments.
    const scopedLogger = freeze({
        error: (message, meta) => baseLogger.error(message, meta),
        warn: (message, meta) => baseLogger.warn(message, meta),
        log: (message, meta) => baseLogger.log(message, meta),
        info: (message, meta) => baseLogger.info(message, meta),
        debug: (message, meta) => baseLogger.debug(message, meta),
        setLogLevel: baseLogger.setLogLevel,
        getLogLevel: baseLogger.getLogLevel,
    });
    // Register logger for global level management (registry not read in this section)
    loggerRegistry.add(scopedLogger);
    return scopedLogger;
}
|
|
556
|
+
/**
 * Default logger instance for the project-scope library.
 * Use this for general logging within the library.
 *
 * @example
 * ```typescript
 * import { logger } from '@hyperfrontend/project-scope/core'
 *
 * logger.setLogLevel('debug')
 * logger.debug('Analyzing project', { path: './src' })
 * ```
 *
 * NOTE(review): the result of the call below is discarded, so no `logger`
 * binding actually exists here despite the example above — presumably the
 * bundler removed an unused export binding; confirm against the package's
 * public API. The call still registers a logger in loggerRegistry.
 */
createScopedLogger('project-scope');

// Logger dedicated to the file-system read helpers below.
const fsLogger = createScopedLogger('project-scope:fs');
|
|
571
|
+
/**
 * Create a file system error carrying a category code and context.
 *
 * @param message - The error message describing what went wrong
 * @param code - The category code for this type of filesystem failure
 * @param context - Additional context including path, operation, and cause
 * @returns An Error with enumerable (non-writable) `code` and `context` properties
 */
function createFileSystemError(message, code, context) {
    const error = createError(message);
    const descriptors = {
        code: { value: code, enumerable: true },
        context: { value: context, enumerable: true },
    };
    defineProperties(error, descriptors);
    return error;
}
|
|
587
|
+
/**
 * Read file contents as string.
 *
 * @param filePath - Path to file
 * @param encoding - File encoding (default: utf-8)
 * @returns File contents as string
 * @throws {Error} If file doesn't exist (FS_NOT_FOUND) or can't be read (FS_READ_ERROR)
 *
 * @example
 * ```typescript
 * import { readFileContent } from '@hyperfrontend/project-scope'
 *
 * const content = readFileContent('./package.json')
 * console.log(content) // JSON string
 * ```
 */
function readFileContent(filePath, encoding = 'utf-8') {
    const readContext = { path: filePath, operation: 'read' };
    if (!existsSync(filePath)) {
        fsLogger.debug('File not found', { path: filePath });
        throw createFileSystemError(`File not found: ${filePath}`, 'FS_NOT_FOUND', readContext);
    }
    try {
        return readFileSync(filePath, { encoding });
    }
    catch (error) {
        fsLogger.warn('Failed to read file', { path: filePath });
        throw createFileSystemError(`Failed to read file: ${filePath}`, 'FS_READ_ERROR', { ...readContext, cause: error });
    }
}
|
|
616
|
+
/**
 * Read file if exists, return null otherwise.
 * Best-effort variant of readFileContent: never throws.
 *
 * @param filePath - Path to file
 * @param encoding - File encoding (default: utf-8)
 * @returns File contents, or null if the file doesn't exist or can't be read
 */
function readFileIfExists(filePath, encoding = 'utf-8') {
    try {
        return existsSync(filePath) ? readFileSync(filePath, { encoding }) : null;
    }
    catch {
        // Swallowing the error is deliberate: callers opted into best-effort reads.
        return null;
    }
}
|
|
634
|
+
|
|
635
|
+
// Logger dedicated to the file-system write helpers below.
const fsWriteLogger = createScopedLogger('project-scope:fs:write');
/**
 * Ensure a directory exists, creating it (and any missing parents) if needed.
 *
 * @param dirPath - Directory path to ensure exists
 */
function ensureDir(dirPath) {
    if (existsSync(dirPath)) {
        return;
    }
    fsWriteLogger.debug('Creating directory recursively', { path: dirPath });
    mkdirSync(dirPath, { recursive: true });
}
|
|
647
|
+
/**
 * Write string content to file.
 * Creates parent directories if needed.
 *
 * @param filePath - Absolute or relative path to the target file
 * @param content - String content to write to the file
 * @param options - Optional write configuration (encoding, mode)
 * @throws {Error} If write fails (FS_WRITE_ERROR)
 *
 * @example
 * ```typescript
 * import { writeFileContent } from '@hyperfrontend/project-scope'
 *
 * // Write a simple text file
 * writeFileContent('./output/data.txt', 'Hello, World!')
 *
 * // Write with custom encoding
 * writeFileContent('./output/data.txt', 'Content', { encoding: 'utf-8' })
 * ```
 */
function writeFileContent(filePath, content, options) {
    try {
        const encoding = options?.encoding ?? 'utf-8';
        fsWriteLogger.debug('Writing file content', { path: filePath, size: content.length, encoding });
        ensureDir(dirname(filePath));
        writeFileSync(filePath, content, { encoding, mode: options?.mode });
        fsWriteLogger.debug('File written successfully', { path: filePath });
    }
    catch (error) {
        const reason = error instanceof Error ? error.message : String(error);
        fsWriteLogger.warn('Failed to write file', { path: filePath, error: reason });
        throw createFileSystemError(`Failed to write file: ${filePath}`, 'FS_WRITE_ERROR', { path: filePath, operation: 'write', cause: error });
    }
}
|
|
682
|
+
|
|
683
|
+
/**
 * Get file stats with error handling.
 *
 * @param filePath - Path to file
 * @param followSymlinks - Whether to follow symlinks (default: true)
 * @returns File stats, or null if the path doesn't exist or can't be stat'ed
 */
function getFileStat(filePath, followSymlinks = true) {
    try {
        // No existsSync pre-check: existsSync() returns false for broken
        // symlinks (it follows the link), which previously made lstat-based
        // lookups (followSymlinks = false) report null for a link that does
        // exist. A genuinely missing path now throws here and yields null,
        // preserving the original result for every other case.
        const stat = followSymlinks ? statSync(filePath) : lstatSync(filePath);
        return {
            isFile: stat.isFile(),
            isDirectory: stat.isDirectory(),
            // NOTE: with followSymlinks = true statSync follows the link, so
            // isSymlink is always false; pass followSymlinks = false to detect links.
            isSymlink: stat.isSymbolicLink(),
            size: stat.size,
            created: stat.birthtime,
            modified: stat.mtime,
            accessed: stat.atime,
            mode: stat.mode,
        };
    }
    catch {
        return null;
    }
}
|
|
711
|
+
/**
 * Check if path is a directory.
 *
 * @param dirPath - Path to check
 * @returns True if path is a directory, false otherwise (including when the path is missing)
 */
function isDirectory(dirPath) {
    return getFileStat(dirPath)?.isDirectory ?? false;
}
|
|
721
|
+
/**
 * Check if path exists.
 * Thin wrapper over Node's existsSync so callers stay on this module's API.
 * NOTE: per Node's fs docs, existsSync follows symlinks, so it reports false
 * for a broken symlink even though the link entry itself exists.
 *
 * @param filePath - Path to check
 * @returns True if path exists
 */
function exists(filePath) {
    return existsSync(filePath);
}
|
|
730
|
+
|
|
731
|
+
// Logger dedicated to the directory helpers below.
const fsDirLogger = createScopedLogger('project-scope:fs:dir');
/**
 * List immediate contents of a directory.
 *
 * @param dirPath - Absolute or relative path to the directory
 * @returns Array of entries with metadata for each file/directory
 * @throws {Error} If the directory doesn't exist (FS_NOT_FOUND), isn't a
 *   directory (FS_NOT_A_DIRECTORY), or can't be read (FS_READ_ERROR)
 *
 * @example
 * ```typescript
 * import { readDirectory } from '@hyperfrontend/project-scope'
 *
 * const entries = readDirectory('./src')
 * for (const entry of entries) {
 *   console.log(entry.name, entry.isFile ? 'file' : 'directory')
 * }
 * ```
 */
function readDirectory(dirPath) {
    fsDirLogger.debug('Reading directory', { path: dirPath });
    if (!existsSync(dirPath)) {
        fsDirLogger.debug('Directory not found', { path: dirPath });
        throw createFileSystemError(`Directory not found: ${dirPath}`, 'FS_NOT_FOUND', { path: dirPath, operation: 'readdir' });
    }
    if (!isDirectory(dirPath)) {
        fsDirLogger.debug('Path is not a directory', { path: dirPath });
        throw createFileSystemError(`Not a directory: ${dirPath}`, 'FS_NOT_A_DIRECTORY', { path: dirPath, operation: 'readdir' });
    }
    try {
        const dirents = readdirSync(dirPath, { withFileTypes: true });
        fsDirLogger.debug('Directory read complete', { path: dirPath, entryCount: dirents.length });
        const toEntry = (dirent) => ({
            name: dirent.name,
            path: join$1(dirPath, dirent.name),
            isFile: dirent.isFile(),
            isDirectory: dirent.isDirectory(),
            isSymlink: dirent.isSymbolicLink(),
        });
        return dirents.map(toEntry);
    }
    catch (error) {
        fsDirLogger.warn('Failed to read directory', { path: dirPath, error: error instanceof Error ? error.message : String(error) });
        throw createFileSystemError(`Failed to read directory: ${dirPath}`, 'FS_READ_ERROR', {
            path: dirPath,
            operation: 'readdir',
            cause: error,
        });
    }
}
|
|
779
|
+
|
|
780
|
+
/**
 * Join path segments.
 * Uses platform-specific separators (e.g., / or \).
 * Thin wrapper over the imported path join (bundler-aliased as `join$1` —
 * presumably node:path's join; confirm against the bundle's imports) so
 * consumers use this module's API surface.
 *
 * @param paths - Path segments to join
 * @returns Joined path
 */
function join(...paths) {
    return join$1(...paths);
}
|
|
790
|
+
|
|
791
|
+
// Logger dedicated to the upward-traversal helpers below.
const fsTraversalLogger = createScopedLogger('project-scope:fs:traversal');
/**
 * Generic upward directory traversal.
 * Name avoids similarity to fs.readdir/fs.readdirSync.
 *
 * Walks from `startPath` (resolved to an absolute path) toward the
 * filesystem root, testing each directory with `predicate`. The root is
 * tested exactly once, after the loop, so every directory on the path —
 * root included — is visited at most once.
 *
 * @param startPath - Starting directory
 * @param predicate - Function to test each directory
 * @returns First matching directory or null
 */
function traverseUpward(startPath, predicate) {
    fsTraversalLogger.debug('Starting upward traversal', { startPath });
    let currentPath = resolve(startPath);
    const rootPath = parse$1(currentPath).root;
    while (currentPath !== rootPath) {
        if (predicate(currentPath)) {
            fsTraversalLogger.debug('Upward traversal found match', { startPath, foundPath: currentPath });
            return currentPath;
        }
        // dirname() moves exactly one level up and eventually reaches the root.
        currentPath = dirname(currentPath);
    }
    // Check root directory (excluded by the loop condition above)
    if (predicate(rootPath)) {
        fsTraversalLogger.debug('Upward traversal found match at root', { startPath, foundPath: rootPath });
        return rootPath;
    }
    fsTraversalLogger.debug('Upward traversal found no match', { startPath });
    return null;
}
|
|
819
|
+
/**
 * Find directory containing any of the specified marker files.
 *
 * @param startPath - Starting directory
 * @param markers - Array of marker file names to search for
 * @returns First directory containing any marker, or null
 */
function locateByMarkers(startPath, markers) {
    fsTraversalLogger.debug('Locating by markers', { startPath, markers });
    // A directory matches when at least one marker file exists inside it.
    const containsMarker = (dir) => markers.some((marker) => exists(join(dir, marker)));
    const result = traverseUpward(startPath, containsMarker);
    if (result) {
        fsTraversalLogger.debug('Found directory with marker', { startPath, foundPath: result });
    }
    return result;
}
|
|
834
|
+
/**
 * Find directory where predicate returns true, starting from given path.
 * Thin logging wrapper around traverseUpward.
 *
 * @param startPath - Starting directory
 * @param test - Function to test if directory matches criteria
 * @returns Matching directory path or null
 */
function findUpwardWhere(startPath, test) {
    fsTraversalLogger.debug('Finding upward where condition met', { startPath });
    return traverseUpward(startPath, test);
}
|
|
845
|
+
|
|
846
|
+
/**
 * Create a structured error with code and optional context.
 *
 * NOTE: unlike createFileSystemError, which installs non-writable properties
 * via defineProperties, these properties are ordinary writable assignments.
 *
 * @param message - The human-readable error message
 * @param code - The machine-readable error code for programmatic handling
 * @param context - Additional contextual information about the error
 * @returns Structured error instance with code and context properties
 *
 * @example
 * ```typescript
 * import { createStructuredError } from '@hyperfrontend/project-scope'
 *
 * throw createStructuredError(
 *   'Configuration file not found',
 *   'CONFIG_NOT_FOUND',
 *   { path: './config.json', searched: ['./config.json', './settings.json'] }
 * )
 * ```
 */
function createStructuredError(message, code, context) {
    const error = createError(message);
    return Object.assign(error, { code, context: context ?? {} });
}
|
|
871
|
+
/**
 * Create a configuration-related error.
 *
 * @param message - The human-readable error message
 * @param code - The machine-readable error code for programmatic handling
 * @param context - Additional contextual information (e.g., file path, config key)
 * @returns Structured error instance tagged with type 'config'
 */
function createConfigError(message, code, context) {
    // Tag the context so consumers can distinguish config failures.
    const taggedContext = { ...context, type: 'config' };
    return createStructuredError(message, code, taggedContext);
}
|
|
882
|
+
|
|
883
|
+
// Logger dedicated to the package.json helpers below.
const packageLogger = createScopedLogger('project-scope:project:package');
/**
 * Verifies that a value is a non-null object whose own enumerable values are
 * all strings — used for validating dependency maps and script definitions.
 *
 * @param value - Value to check
 * @returns True if value is a record of strings
 */
function isStringRecord(value) {
    if (typeof value !== 'object' || value === null) {
        return false;
    }
    for (const entryValue of values(value)) {
        if (typeof entryValue !== 'string') {
            return false;
        }
    }
    return true;
}
|
|
896
|
+
/**
 * Extracts and normalizes the workspaces field from package.json,
 * supporting both the plain array format and the object form with a
 * `packages` array.
 *
 * @param value - Raw workspaces value from package.json
 * @returns Normalized workspace patterns or undefined if invalid
 */
function parseWorkspaces(value) {
    const isStringArray = (candidate) => isArray(candidate) && candidate.every((item) => typeof item === 'string');
    if (isStringArray(value)) {
        return value;
    }
    if (typeof value === 'object' && value !== null) {
        const obj = value;
        // Consistency fix: the object form now requires all-string entries
        // too, matching the validation the plain-array form above applies.
        // Previously any array (even of non-strings) was accepted here.
        if (isStringArray(obj['packages'])) {
            return { packages: obj['packages'] };
        }
    }
    return undefined;
}
|
|
915
|
+
/**
 * Validate and normalize package.json data.
 *
 * Known fields are type-checked and normalized (invalid values become
 * undefined); unknown fields are preserved as-is.
 *
 * @param data - Raw parsed data
 * @returns Validated package.json
 * @throws {Error} When data is not a non-null object
 */
function validatePackageJson(data) {
    if (typeof data !== 'object' || data === null) {
        throw createError('package.json must be an object');
    }
    const pkg = data;
    return {
        // Spread FIRST so unknown fields are kept but the normalized values
        // below win. The previous implementation spread pkg LAST, letting
        // raw (possibly invalid) values silently overwrite every validated
        // field and defeating the validation entirely.
        ...pkg,
        name: typeof pkg['name'] === 'string' ? pkg['name'] : undefined,
        version: typeof pkg['version'] === 'string' ? pkg['version'] : undefined,
        description: typeof pkg['description'] === 'string' ? pkg['description'] : undefined,
        main: typeof pkg['main'] === 'string' ? pkg['main'] : undefined,
        module: typeof pkg['module'] === 'string' ? pkg['module'] : undefined,
        browser: typeof pkg['browser'] === 'string' ? pkg['browser'] : undefined,
        types: typeof pkg['types'] === 'string' ? pkg['types'] : undefined,
        bin: typeof pkg['bin'] === 'string' || isStringRecord(pkg['bin']) ? pkg['bin'] : undefined,
        scripts: isStringRecord(pkg['scripts']) ? pkg['scripts'] : undefined,
        dependencies: isStringRecord(pkg['dependencies']) ? pkg['dependencies'] : undefined,
        devDependencies: isStringRecord(pkg['devDependencies']) ? pkg['devDependencies'] : undefined,
        peerDependencies: isStringRecord(pkg['peerDependencies']) ? pkg['peerDependencies'] : undefined,
        optionalDependencies: isStringRecord(pkg['optionalDependencies']) ? pkg['optionalDependencies'] : undefined,
        workspaces: parseWorkspaces(pkg['workspaces']),
        // typeof null === 'object', so exclude null explicitly.
        exports: typeof pkg['exports'] === 'object' && pkg['exports'] !== null ? pkg['exports'] : undefined,
        engines: isStringRecord(pkg['engines']) ? pkg['engines'] : undefined,
    };
}
|
|
946
|
+
/**
 * Reads and parses package.json from a directory, validating the structure
 * and normalizing fields to the PackageJson interface.
 *
 * @param projectPath - Project directory path or path to package.json
 * @returns Parsed package.json
 * @throws {Error} If the file doesn't exist or is invalid (CONFIG_PARSE_ERROR)
 */
function readPackageJson(projectPath) {
    const packageJsonPath = projectPath.endsWith('package.json')
        ? projectPath
        : join$1(projectPath, 'package.json');
    packageLogger.debug('Reading package.json', { path: packageJsonPath });
    const content = readFileContent(packageJsonPath);
    try {
        const validated = validatePackageJson(parse(content));
        packageLogger.debug('Package.json read successfully', { path: packageJsonPath, name: validated.name });
        return validated;
    }
    catch (error) {
        packageLogger.warn('Failed to parse package.json', {
            path: packageJsonPath,
            error: error instanceof Error ? error.message : String(error),
        });
        throw createConfigError(`Failed to parse package.json: ${packageJsonPath}`, 'CONFIG_PARSE_ERROR', {
            filePath: packageJsonPath,
            cause: error,
        });
    }
}
|
|
975
|
+
/**
 * Attempts to read and parse package.json if it exists, returning null on a
 * missing file, empty content, or parse failure. Best-effort counterpart to
 * readPackageJson — never throws.
 *
 * @param projectPath - Project directory path or path to package.json
 * @returns Parsed package.json or null if not found / unparseable
 */
function readPackageJsonIfExists(projectPath) {
    const packageJsonPath = projectPath.endsWith('package.json')
        ? projectPath
        : join$1(projectPath, 'package.json');
    const content = readFileIfExists(packageJsonPath);
    // Falsy content covers both "file missing" and "file empty".
    if (!content) {
        packageLogger.debug('Package.json not found', { path: packageJsonPath });
        return null;
    }
    try {
        const validated = validatePackageJson(parse(content));
        packageLogger.debug('Package.json loaded', { path: packageJsonPath, name: validated.name });
        return validated;
    }
    catch {
        packageLogger.debug('Failed to parse package.json, returning null', { path: packageJsonPath });
        return null;
    }
}
|
|
999
|
+
/**
 * Find nearest package.json by walking up the directory tree.
 *
 * @param startPath - Starting path
 * @returns Path to the DIRECTORY containing package.json (not the file
 *   itself), or null if not found
 */
function findNearestPackageJson(startPath) {
    return locateByMarkers(startPath, ['package.json']);
}
|
|
1008
|
+
|
|
1009
|
+
// NOTE(review): return value discarded — presumably a bundler artifact where
// the heuristics module's logger binding was removed as unused; the call
// still registers the logger in loggerRegistry. Confirm it is still needed.
createScopedLogger('project-scope:heuristics:deps');
|
|
1010
|
+
|
|
1011
|
+
/**
 * Global registry of all caches for bulk operations.
 * Every cache created by createCache adds itself here; presumably consumed
 * elsewhere for clearing all caches at once — not read in this section.
 */
const cacheRegistry = createSet();
|
|
1015
|
+
/**
 * Create a cache with optional TTL and size limits.
 *
 * The cache provides a simple key-value store with:
 * - Optional TTL (time-to-live) for automatic expiration
 * - Optional maxSize for limiting cache size with FIFO eviction
 * - Lazy expiration (entries are only checked when accessed via get/has,
 *   so size() and keys() may still report entries whose TTL has elapsed)
 *
 * @param options - Cache configuration options
 * @returns Frozen cache instance
 *
 * @example
 * ```typescript
 * // Basic cache
 * const cache = createCache<string, number>()
 * cache.set('answer', 42)
 * cache.get('answer') // 42
 *
 * // Cache with TTL (expires after 60 seconds)
 * const ttlCache = createCache<string, object>({ ttl: 60000 })
 *
 * // Cache with max size (evicts oldest when full)
 * const lruCache = createCache<string, object>({ maxSize: 100 })
 *
 * // Combined options
 * const configCache = createCache<string, object>({
 *   ttl: 30000,
 *   maxSize: 50
 * })
 * ```
 */
function createCache(options) {
    const { ttl, maxSize } = options ?? {};
    const store = createMap();
    // Track insertion order for FIFO eviction. Kept in sync with `store`
    // manually; removeFromOrder is O(n) in cache size, which is acceptable
    // for the small caches this is used for.
    const insertionOrder = [];
    /**
     * Check if an entry is expired.
     * Always false when no TTL was configured.
     *
     * @param entry - Cache entry to check
     * @returns True if entry is expired
     */
    function isExpired(entry) {
        if (ttl === undefined)
            return false;
        // eslint-disable-next-line workspace/no-unsafe-builtin-methods -- Date.now() is needed for Jest fake timers compatibility
        return Date.now() - entry.timestamp > ttl;
    }
    /**
     * Evict oldest entries to make room for new ones.
     * Uses >= so there is room for one more entry after eviction. Expired
     * entries still count toward size here (lazy expiration).
     */
    function evictIfNeeded() {
        if (maxSize === undefined)
            return;
        while (store.size >= maxSize && insertionOrder.length > 0) {
            const oldestKey = insertionOrder.shift();
            if (oldestKey !== undefined) {
                store.delete(oldestKey);
            }
        }
    }
    /**
     * Remove key from insertion order tracking.
     *
     * @param key - Key to remove from order tracking
     */
    function removeFromOrder(key) {
        const index = insertionOrder.indexOf(key);
        if (index !== -1) {
            insertionOrder.splice(index, 1);
        }
    }
    const cache = {
        // Returns the cached value, lazily deleting it if the TTL has elapsed.
        get(key) {
            const entry = store.get(key);
            if (!entry)
                return undefined;
            if (isExpired(entry)) {
                store.delete(key);
                removeFromOrder(key);
                return undefined;
            }
            return entry.value;
        },
        // Overwriting an existing key moves it to the back of the FIFO order
        // and refreshes its TTL timestamp.
        set(key, value) {
            // If key exists, remove from order first
            if (store.has(key)) {
                removeFromOrder(key);
            }
            else {
                // Evict if needed before adding new entry
                evictIfNeeded();
            }
            // eslint-disable-next-line workspace/no-unsafe-builtin-methods -- Date.now() is needed for Jest fake timers compatibility
            store.set(key, { value, timestamp: Date.now() });
            insertionOrder.push(key);
        },
        // Like get(), performs lazy expiration as a side effect.
        has(key) {
            const entry = store.get(key);
            if (!entry)
                return false;
            if (isExpired(entry)) {
                store.delete(key);
                removeFromOrder(key);
                return false;
            }
            return true;
        },
        delete(key) {
            removeFromOrder(key);
            return store.delete(key);
        },
        clear() {
            store.clear();
            insertionOrder.length = 0;
        },
        // NOTE: may include entries that are expired but not yet touched.
        size() {
            return store.size;
        },
        // Returns keys in insertion (FIFO) order; may include expired keys.
        keys() {
            return [...insertionOrder];
        },
    };
    // Register cache for global operations
    cacheRegistry.add(cache);
    return freeze(cache);
}
|
|
1142
|
+
|
|
1143
|
+
/**
|
|
1144
|
+
* Pattern matching utilities with ReDoS protection.
|
|
1145
|
+
* Uses character-by-character matching instead of regex where possible.
|
|
1146
|
+
*/
|
|
1147
|
+
/**
|
|
1148
|
+
* Match path against glob pattern using safe character iteration.
|
|
1149
|
+
* Avoids regex to prevent ReDoS attacks.
|
|
1150
|
+
*
|
|
1151
|
+
* Supported patterns:
|
|
1152
|
+
* - * matches any characters except /
|
|
1153
|
+
* - ** matches any characters including /
|
|
1154
|
+
* - ? matches exactly one character except /
|
|
1155
|
+
* - {a,b,c} matches any of the alternatives
|
|
1156
|
+
*
|
|
1157
|
+
* @param path - The filesystem path to test against the pattern
|
|
1158
|
+
* @param pattern - The glob pattern to match against
|
|
1159
|
+
* @returns True if path matches pattern
|
|
1160
|
+
*
|
|
1161
|
+
* @example
|
|
1162
|
+
* ```typescript
|
|
1163
|
+
* import { matchGlobPattern } from '@hyperfrontend/project-scope'
|
|
1164
|
+
*
|
|
1165
|
+
* matchGlobPattern('src/utils/helper.ts', '\*\*\/*.ts') // true
|
|
1166
|
+
* matchGlobPattern('test.spec.ts', '\*.spec.ts') // true
|
|
1167
|
+
* matchGlobPattern('config.json', '\*.{json,yaml}') // true
|
|
1168
|
+
* matchGlobPattern('src/index.ts', 'src/\*.ts') // true
|
|
1169
|
+
* ```
|
|
1170
|
+
*/
|
|
1171
|
+
function matchGlobPattern(path, pattern) {
    // Delegate to the segment matcher, starting at the first segment of each.
    const pathSegments = path.split('/');
    const patternSegments = pattern.split('/');
    return matchSegments(pathSegments, patternSegments, 0, 0);
}
|
|
1174
|
+
/**
|
|
1175
|
+
* Internal recursive function to match path segments against pattern segments.
|
|
1176
|
+
*
|
|
1177
|
+
* @param pathParts - Array of path segments split by '/'
|
|
1178
|
+
* @param patternParts - Array of pattern segments split by '/'
|
|
1179
|
+
* @param pathIdx - Current index in pathParts being examined
|
|
1180
|
+
* @param patternIdx - Current index in patternParts being examined
|
|
1181
|
+
* @returns True if remaining segments match
|
|
1182
|
+
*/
|
|
1183
|
+
function matchSegments(pathParts, patternParts, pathIdx, patternIdx) {
    const pathDone = pathIdx === pathParts.length;
    const patternDone = patternIdx === patternParts.length;
    // When the pattern is consumed, the match succeeds only if the path is too.
    if (patternDone) {
        return pathDone;
    }
    const patternPart = patternParts[patternIdx];
    if (patternPart === '**') {
        // Globstar: let '**' absorb zero or more path segments and try the
        // rest of the pattern at every possible resume point.
        for (let resume = pathParts.length; resume >= pathIdx; resume--) {
            if (matchSegments(pathParts, patternParts, resume, patternIdx + 1)) {
                return true;
            }
        }
        return false;
    }
    // Path exhausted but a non-globstar pattern segment remains.
    if (pathDone) {
        return false;
    }
    // Consume one segment from each side when they match.
    return matchSegment(pathParts[pathIdx], patternPart)
        && matchSegments(pathParts, patternParts, pathIdx + 1, patternIdx + 1);
}
|
|
1212
|
+
/**
|
|
1213
|
+
* Match a single path segment against a pattern segment.
|
|
1214
|
+
* Handles *, ?, and {a,b,c} patterns.
|
|
1215
|
+
*
|
|
1216
|
+
* @param text - The path segment text to match
|
|
1217
|
+
* @param pattern - The pattern segment to match against
|
|
1218
|
+
* @returns True if the text matches the pattern
|
|
1219
|
+
*/
|
|
1220
|
+
function matchSegment(text, pattern) {
    let ti = 0; // cursor into text
    let pi = 0; // cursor into pattern
    while (pi < pattern.length) {
        const pc = pattern[pi];
        switch (pc) {
            case '*': {
                // '*' matches zero or more characters within the segment.
                pi++;
                if (pi === pattern.length) {
                    return true; // trailing '*' matches the remainder
                }
                // Backtrack: try the rest of the pattern at every split point.
                const rest = pattern.slice(pi);
                for (let start = ti; start <= text.length; start++) {
                    if (matchSegment(text.slice(start), rest)) {
                        return true;
                    }
                }
                return false;
            }
            case '?': {
                // '?' consumes exactly one character.
                if (ti >= text.length) {
                    return false;
                }
                ti++;
                pi++;
                break;
            }
            case '{': {
                const closeIdx = findClosingBrace(pattern, pi);
                if (closeIdx === -1) {
                    // Unmatched '{' is treated as a literal character.
                    if (text[ti] !== pc) {
                        return false;
                    }
                    ti++;
                    pi++;
                    break;
                }
                // {a,b,c}: succeed if any alternative followed by the pattern
                // tail matches the remaining text.
                const tail = pattern.slice(closeIdx + 1);
                const remainingText = text.slice(ti);
                return extractAlternatives(pattern.slice(pi + 1, closeIdx))
                    .some((alt) => matchSegment(remainingText, alt + tail));
            }
            default: {
                // Literal character must match exactly.
                if (ti >= text.length || text[ti] !== pc) {
                    return false;
                }
                ti++;
                pi++;
            }
        }
    }
    // Pattern consumed; succeed only if the text is fully consumed too.
    return ti === text.length;
}
|
|
1279
|
+
/**
|
|
1280
|
+
* Helper to match from a specific position.
|
|
1281
|
+
*
|
|
1282
|
+
* @param text - The full text being matched
|
|
1283
|
+
* @param textIdx - The starting index in text to match from
|
|
1284
|
+
* @param pattern - The full pattern being matched
|
|
1285
|
+
* @param patternIdx - The starting index in pattern to match from
|
|
1286
|
+
* @returns True if the text matches the pattern from the given positions
|
|
1287
|
+
*/
|
|
1288
|
+
function matchSegmentFrom(text, textIdx, pattern, patternIdx) {
    // Match the suffix of text against the suffix of pattern.
    return matchSegment(text.slice(textIdx), pattern.slice(patternIdx));
}
|
|
1293
|
+
/**
|
|
1294
|
+
* Find closing brace for {a,b,c} pattern.
|
|
1295
|
+
*
|
|
1296
|
+
* @param pattern - The pattern string to search within
|
|
1297
|
+
* @param startIdx - The index of the opening brace
|
|
1298
|
+
* @returns The index of the matching closing brace, or -1 if not found
|
|
1299
|
+
*/
|
|
1300
|
+
function findClosingBrace(pattern, startIdx) {
    // Scan forward, tracking nesting depth, until the brace opened at
    // startIdx is balanced.
    let depth = 0;
    for (let i = startIdx; i < pattern.length; i++) {
        const ch = pattern[i];
        if (ch === '{') {
            depth += 1;
        }
        else if (ch === '}' && --depth === 0) {
            return i;
        }
    }
    return -1;
}
|
|
1315
|
+
/**
|
|
1316
|
+
* Extract alternatives from {a,b,c} pattern content.
|
|
1317
|
+
*
|
|
1318
|
+
* @param content - The content between braces (without the braces themselves)
|
|
1319
|
+
* @returns Array of alternative strings split by commas at depth 0
|
|
1320
|
+
*/
|
|
1321
|
+
/**
 * Extract alternatives from {a,b,c} pattern content.
 *
 * Splits on commas at nesting depth 0, preserving nested brace groups intact.
 * Empty alternatives are preserved everywhere, including at the end: the
 * content 'json,' yields ['json', ''], matching shell brace-expansion
 * semantics ({json,} offers an empty alternative). The previous truthiness
 * check (`if (current)`) silently dropped a trailing empty alternative while
 * keeping empty alternatives that appeared before a comma.
 *
 * @param content - The content between braces (without the braces themselves)
 * @returns Array of alternative strings split by commas at depth 0
 */
function extractAlternatives(content) {
    const alternatives = [];
    let current = '';
    let depth = 0;
    for (let i = 0; i < content.length; i++) {
        const char = content[i];
        if (char === ',' && depth === 0) {
            // Top-level comma terminates the current alternative (even if empty).
            alternatives.push(current);
            current = '';
            continue;
        }
        if (char === '{') {
            depth++;
        }
        else if (char === '}') {
            depth--;
        }
        current += char;
    }
    // Push the final alternative; keep it even when empty, so long as the
    // content was not entirely empty ('' still yields []).
    if (current !== '' || alternatives.length > 0) {
        alternatives.push(current);
    }
    return alternatives;
}
|
|
1348
|
+
|
|
1349
|
+
/** Logger scoped to directory-walk operations. */
const walkLogger = createScopedLogger('project-scope:project:walk');
|
|
1350
|
+
/**
|
|
1351
|
+
* Reads .gitignore file from the given directory and extracts
|
|
1352
|
+
* non-comment patterns for use in file traversal filtering.
|
|
1353
|
+
*
|
|
1354
|
+
* @param startPath - Directory containing the .gitignore file
|
|
1355
|
+
* @returns Array of gitignore patterns
|
|
1356
|
+
*/
|
|
1357
|
+
function loadGitignorePatterns(startPath) {
    const gitignorePath = join$1(startPath, '.gitignore');
    const content = readFileIfExists(gitignorePath);
    if (!content) {
        // Missing or empty .gitignore: nothing to filter on.
        return [];
    }
    // Keep every trimmed non-blank line that is not a '#' comment.
    return content
        .split('\n')
        .map((line) => line.trim())
        .filter((line) => line !== '' && !line.startsWith('#'));
}
|
|
1372
|
+
/**
|
|
1373
|
+
* Evaluates whether a relative path should be ignored based on
|
|
1374
|
+
* a list of gitignore-style patterns.
|
|
1375
|
+
*
|
|
1376
|
+
* @param relativePath - Path relative to the root directory
|
|
1377
|
+
* @param patterns - Array of gitignore-style patterns to test
|
|
1378
|
+
* @returns True if the path matches any ignore pattern
|
|
1379
|
+
*/
|
|
1380
|
+
function matchesIgnorePattern(relativePath, patterns) {
    // Short-circuits on the first matching pattern.
    return patterns.some((pattern) => matchPattern(relativePath, pattern));
}
|
|
1388
|
+
/**
|
|
1389
|
+
* Tests if the given path matches a gitignore-style pattern,
|
|
1390
|
+
* supporting negation patterns with '!' prefix.
|
|
1391
|
+
* Uses safe character-by-character matching to prevent ReDoS attacks.
|
|
1392
|
+
*
|
|
1393
|
+
* @param path - File or directory path to test
|
|
1394
|
+
* @param pattern - Gitignore-style pattern (may include wildcards)
|
|
1395
|
+
* @returns True if the path matches the pattern (or doesn't match if negated)
|
|
1396
|
+
*/
|
|
1397
|
+
function matchPattern(path, pattern) {
    // A leading '/' anchors a gitignore pattern to the root; strip it first.
    let candidate = pattern.startsWith('/') ? pattern.slice(1) : pattern;
    const negated = candidate.startsWith('!');
    if (negated) {
        candidate = candidate.slice(1);
    }
    // The pattern matches when it covers the whole path, the path nested
    // under any directory prefix, or any single path segment.
    const matched = matchGlobPattern(path, candidate)
        || matchGlobPattern(path, `**/${candidate}`)
        || path.split('/').some((segment) => matchGlobPattern(segment, candidate));
    // NOTE(review): plain inversion for '!' patterns differs from real
    // gitignore semantics, where negation re-includes previously ignored
    // paths in order; here a lone '!x' ignores everything except 'x' —
    // confirm this is intended before relying on negations.
    return negated ? !matched : matched;
}
|
|
1406
|
+
/**
|
|
1407
|
+
* Traverses a directory tree synchronously, calling a visitor function
|
|
1408
|
+
* for each file and directory encountered. Supports depth limiting,
|
|
1409
|
+
* hidden file filtering, and gitignore pattern matching.
|
|
1410
|
+
*
|
|
1411
|
+
* @param startPath - Root directory to begin traversal
|
|
1412
|
+
* @param visitor - Callback function invoked for each file system entry
|
|
1413
|
+
* @param options - Configuration for traversal behavior
|
|
1414
|
+
*/
|
|
1415
|
+
function walkDirectory(startPath, visitor, options) {
    walkLogger.debug('Starting directory walk', {
        startPath,
        maxDepth: options?.maxDepth ?? -1,
        includeHidden: options?.includeHidden ?? false,
        respectGitignore: options?.respectGitignore ?? true,
        ignorePatterns: options?.ignorePatterns?.length ?? 0,
    });
    // maxDepth of -1 means unlimited depth.
    const maxDepth = options?.maxDepth ?? -1;
    const includeHidden = options?.includeHidden ?? false;
    const ignorePatterns = options?.ignorePatterns ?? [];
    const respectGitignore = options?.respectGitignore ?? true;
    // NOTE(review): only the root .gitignore is loaded; nested .gitignore
    // files deeper in the tree are not honored — confirm intended.
    const gitignorePatterns = respectGitignore ? loadGitignorePatterns(startPath) : [];
    const allIgnorePatterns = [...ignorePatterns, ...gitignorePatterns];
    if (gitignorePatterns.length > 0) {
        walkLogger.debug('Loaded gitignore patterns', { count: gitignorePatterns.length });
    }
    /**
     * Recursively walks directory entries, applying visitor to each.
     *
     * Visitor protocol: returning 'stop' aborts the entire walk; returning
     * 'skip' on a directory prevents descending into it; any other return
     * value continues normally.
     *
     * @param currentPath - Absolute path to current directory
     * @param relativePath - Path relative to the starting directory
     * @param depth - Current recursion depth
     * @returns False to stop walking, true to continue
     */
    function walk(currentPath, relativePath, depth) {
        if (maxDepth !== -1 && depth > maxDepth) {
            return true;
        }
        let entries;
        try {
            entries = readDirectory(currentPath);
        }
        catch {
            // Unreadable directory (permissions, race with deletion): skip it
            // silently and keep walking the rest of the tree.
            return true;
        }
        for (const entry of entries) {
            // Dotfiles/dot-directories are excluded unless explicitly enabled.
            if (!includeHidden && entry.name.startsWith('.')) {
                continue;
            }
            const entryRelativePath = relativePath ? `${relativePath}/${entry.name}` : entry.name;
            if (matchesIgnorePattern(entryRelativePath, allIgnorePatterns)) {
                continue;
            }
            const walkEntry = {
                name: entry.name,
                path: entry.path,
                relativePath: entryRelativePath,
                isFile: entry.isFile,
                isDirectory: entry.isDirectory,
                isSymlink: entry.isSymlink,
                depth,
            };
            const result = visitor(walkEntry);
            if (result === 'stop') {
                return false;
            }
            if (result === 'skip') {
                continue;
            }
            if (entry.isDirectory) {
                // Propagate 'stop' (false) up through every recursion level.
                const shouldContinue = walk(entry.path, entryRelativePath, depth + 1);
                if (!shouldContinue) {
                    return false;
                }
            }
        }
        return true;
    }
    walk(startPath, '', 0);
    walkLogger.debug('Directory walk complete', { startPath });
}
|
|
1487
|
+
|
|
1488
|
+
/** Logger scoped to file-search operations. */
const searchLogger = createScopedLogger('project-scope:project:search');
|
|
1489
|
+
/**
|
|
1490
|
+
* Tests if a path matches at least one pattern from an array of globs,
|
|
1491
|
+
* enabling flexible multi-pattern file filtering.
|
|
1492
|
+
* Uses safe character-by-character matching to prevent ReDoS attacks.
|
|
1493
|
+
*
|
|
1494
|
+
* @param path - File path to test
|
|
1495
|
+
* @param patterns - Array of glob patterns
|
|
1496
|
+
* @returns True if path matches any pattern
|
|
1497
|
+
*/
|
|
1498
|
+
function matchesPatterns(path, patterns) {
    // First matching pattern wins; empty pattern list matches nothing.
    for (const pattern of patterns) {
        if (matchGlobPattern(path, pattern)) {
            return true;
        }
    }
    return false;
}
|
|
1501
|
+
/**
|
|
1502
|
+
* Searches a directory tree for files matching one or more glob patterns,
|
|
1503
|
+
* returning relative or absolute paths based on options.
|
|
1504
|
+
*
|
|
1505
|
+
* @param startPath - Root directory to begin the search
|
|
1506
|
+
* @param patterns - Glob patterns (e.g., '*.ts', '**\/*.json') to filter files
|
|
1507
|
+
* @param options - Configuration for search behavior
|
|
1508
|
+
* @returns List of relative file paths that match the patterns
|
|
1509
|
+
*
|
|
1510
|
+
* @example
|
|
1511
|
+
* ```typescript
|
|
1512
|
+
* import { findFiles } from '@hyperfrontend/project-scope'
|
|
1513
|
+
*
|
|
1514
|
+
* // Find all TypeScript files
|
|
1515
|
+
* const tsFiles = findFiles('./src', '\*\*\/*.ts')
|
|
1516
|
+
*
|
|
1517
|
+
* // Find multiple file types
|
|
1518
|
+
* const configFiles = findFiles('./', ['\*.json', '\*.yaml', '\*.yml'])
|
|
1519
|
+
*
|
|
1520
|
+
* // Limit results and get absolute paths
|
|
1521
|
+
* const first10 = findFiles('./src', '\*\*\/*.ts', {
|
|
1522
|
+
* maxResults: 10,
|
|
1523
|
+
* absolutePaths: true
|
|
1524
|
+
* })
|
|
1525
|
+
* ```
|
|
1526
|
+
*/
|
|
1527
|
+
function findFiles(startPath, patterns, options) {
    // Accept a single pattern or an array of patterns.
    const normalizedPatterns = isArray(patterns) ? patterns : [patterns];
    searchLogger.debug('Finding files', { startPath, patterns: normalizedPatterns, maxResults: options?.maxResults });
    const maxResults = options?.maxResults ?? Infinity;
    const results = [];
    const visitor = (entry) => {
        // Abort the whole walk once the result cap is reached.
        if (results.length >= maxResults) {
            return 'stop';
        }
        if (entry.isFile && matchesPatterns(entry.relativePath, normalizedPatterns)) {
            results.push(options?.absolutePaths ? entry.path : entry.relativePath);
        }
        return undefined;
    };
    walkDirectory(startPath, visitor, options);
    searchLogger.debug('File search complete', { startPath, matchCount: results.length });
    return results;
}
|
|
1547
|
+
|
|
1548
|
+
// NOTE(review): the following calls discard their results — presumably
// bundler residue where the consuming bindings were tree-shaken away but
// the (potentially side-effectful) calls were preserved. Confirm whether
// they can be removed upstream.
createScopedLogger('project-scope:heuristics:entry-points');
/**
 * Cache for entry point discovery results.
 * TTL: 60 seconds (entry points are relatively stable)
 */
createCache({ ttl: 60000, maxSize: 50 });

createScopedLogger('project-scope:tech');
/**
 * Cache for tech detection results.
 * TTL: 60 seconds (tech stack can change during active development)
 */
createCache({ ttl: 60000, maxSize: 50 });

createScopedLogger('project-scope:heuristics:project-type');

/** Logger scoped to workspace-root discovery. */
const rootLogger = createScopedLogger('project-scope:root');
/**
 * Files indicating workspace/monorepo root.
 */
const WORKSPACE_MARKERS = ['nx.json', 'turbo.json', 'lerna.json', 'pnpm-workspace.yaml', 'rush.json'];
|
|
1569
|
+
/**
|
|
1570
|
+
* Find workspace root (monorepo root).
|
|
1571
|
+
* Searches up for workspace markers like nx.json, turbo.json, etc.
|
|
1572
|
+
*
|
|
1573
|
+
* @param startPath - Starting path
|
|
1574
|
+
* @returns Workspace root path or null
|
|
1575
|
+
*
|
|
1576
|
+
* @example
|
|
1577
|
+
* ```typescript
|
|
1578
|
+
* import { findWorkspaceRoot } from '@hyperfrontend/project-scope'
|
|
1579
|
+
*
|
|
1580
|
+
* const root = findWorkspaceRoot('./libs/my-lib')
|
|
1581
|
+
* if (root) {
|
|
1582
|
+
* console.log('Monorepo root:', root) // e.g., '/home/user/my-monorepo'
|
|
1583
|
+
* }
|
|
1584
|
+
* ```
|
|
1585
|
+
*/
|
|
1586
|
+
function findWorkspaceRoot(startPath) {
    rootLogger.debug('Finding workspace root', { startPath });
    // Strategy 1: explicit workspace marker files (nx.json, turbo.json, ...).
    const byMarker = locateByMarkers(startPath, WORKSPACE_MARKERS);
    if (byMarker) {
        rootLogger.debug('Found workspace root by marker', { root: byMarker });
        return byMarker;
    }
    // Strategy 2: nearest ancestor package.json declaring a "workspaces" field.
    const hasWorkspacesField = (dir) => readPackageJsonIfExists(dir)?.workspaces !== undefined;
    const byWorkspaces = findUpwardWhere(startPath, hasWorkspacesField);
    if (byWorkspaces) {
        rootLogger.debug('Found workspace root by workspaces field', { root: byWorkspaces });
        return byWorkspaces;
    }
    // Strategy 3: fall back to the nearest package.json of any kind (may be null).
    const byPackage = findNearestPackageJson(startPath);
    if (byPackage) {
        rootLogger.debug('Found workspace root by package.json', { root: byPackage });
    }
    else {
        rootLogger.debug('Workspace root not found');
    }
    return byPackage;
}
|
|
1610
|
+
|
|
1611
|
+
// NOTE(review): results discarded — presumably tree-shaking residue of a
// bundled build; the calls were kept in case createScopedLogger/createCache
// have side effects. Confirm whether they can be dropped upstream.
createScopedLogger('project-scope:nx');

createScopedLogger('project-scope:nx:devkit');

createScopedLogger('project-scope:nx:config');

createScopedLogger('project-scope:config');
/**
 * Cache for config detection results.
 * TTL: 30 seconds (configs can change frequently during setup)
 */
createCache({ ttl: 30000, maxSize: 50 });
|
|
1623
|
+
|
|
1624
|
+
/**
|
|
1625
|
+
* Safe copies of Number built-in methods and constants.
|
|
1626
|
+
*
|
|
1627
|
+
* These references are captured at module initialization time to protect against
|
|
1628
|
+
* prototype pollution attacks. Import only what you need for tree-shaking.
|
|
1629
|
+
*
|
|
1630
|
+
* @module @hyperfrontend/immutable-api-utils/built-in-copy/number
|
|
1631
|
+
*/
|
|
1632
|
+
// Capture references at module initialization time so later prototype
// pollution or global reassignment cannot affect these bindings.
const _parseInt = globalThis.parseInt;
const _isNaN = globalThis.isNaN;
// ============================================================================
// Parsing
// ============================================================================
/**
 * (Safe copy) Parses a string and returns an integer.
 *
 * NOTE: deliberately shadows the global `parseInt` within this module scope;
 * callers should still pass an explicit radix.
 */
const parseInt = _parseInt;
// ============================================================================
// Global Type Checking (legacy, less strict)
// ============================================================================
/**
 * (Safe copy) Global isNaN function (coerces to number first, less strict than Number.isNaN).
 */
const globalIsNaN = _isNaN;
|
|
1649
|
+
|
|
1650
|
+
// NOTE(review): the unassigned calls below discard their results —
// presumably tree-shaking residue from the bundling step; confirm they are
// needed for side effects before removing.
/** Logger for analysis operations */
createScopedLogger('project-scope:analyze');

/** Logger for CLI operations */
createScopedLogger('project-scope:cli');

createScopedLogger('project-scope:encoding');

createScopedLogger('project-scope:encoding:convert');

createScopedLogger('project-scope:heuristics:framework');
/**
 * Cache for framework identification results.
 * TTL: 60 seconds (frameworks are stable but can change during development)
 */
createCache({ ttl: 60000, maxSize: 50 });

createScopedLogger('project-scope:vfs:tree');

createScopedLogger('project-scope:vfs:factory');

createScopedLogger('project-scope:vfs');

createScopedLogger('project-scope:vfs:diff');
|
|
1674
|
+
|
|
1675
|
+
/**
|
|
1676
|
+
* Dependency Graph
|
|
1677
|
+
*
|
|
1678
|
+
* Builds and analyzes dependency relationships between workspace projects.
|
|
1679
|
+
* Provides functions for traversing the dependency graph and determining
|
|
1680
|
+
* build/release order.
|
|
1681
|
+
*/
|
|
1682
|
+
/**
|
|
1683
|
+
* Finds internal dependencies in a package.json.
|
|
1684
|
+
* Returns names of workspace packages that this package depends on.
|
|
1685
|
+
*
|
|
1686
|
+
* @param packageJson - Parsed package.json content
|
|
1687
|
+
* @param workspacePackageNames - Set of all package names in the workspace
|
|
1688
|
+
* @returns Array of internal dependency names
|
|
1689
|
+
*
|
|
1690
|
+
* @example
|
|
1691
|
+
* ```typescript
|
|
1692
|
+
* const internalDeps = findInternalDependencies(packageJson, allPackageNames)
|
|
1693
|
+
* // ['@scope/lib-a', '@scope/lib-b']
|
|
1694
|
+
* ```
|
|
1695
|
+
*/
|
|
1696
|
+
function findInternalDependencies(packageJson, workspacePackageNames) {
    // Merge every dependency section; duplicate names across sections are
    // harmless because only the key names are inspected below.
    const allDeps = {
        ...packageJson.dependencies,
        ...packageJson.devDependencies,
        ...packageJson.peerDependencies,
        ...packageJson.optionalDependencies,
    };
    // Keep only dependency names that belong to this workspace.
    return keys(allDeps).filter((depName) => workspacePackageNames.has(depName));
}
|
|
1711
|
+
/**
|
|
1712
|
+
* Finds internal dependencies with type information.
|
|
1713
|
+
*
|
|
1714
|
+
* @param packageName - Name of the package being analyzed
|
|
1715
|
+
* @param packageJson - Parsed package.json content
|
|
1716
|
+
* @param workspacePackageNames - Set of all package names in the workspace
|
|
1717
|
+
* @returns Array of dependency edges with type information
|
|
1718
|
+
*/
|
|
1719
|
+
function findInternalDependenciesWithTypes(packageName, packageJson, workspacePackageNames) {
    const depTypes = ['dependencies', 'devDependencies', 'peerDependencies', 'optionalDependencies'];
    const edges = [];
    for (const type of depTypes) {
        const section = packageJson[type];
        if (!section) {
            continue;
        }
        for (const [depName, versionRange] of entries(section)) {
            // Only record edges to packages that live in this workspace.
            if (!workspacePackageNames.has(depName)) {
                continue;
            }
            edges.push({
                from: packageName,
                to: depName,
                type,
                versionRange,
            });
        }
    }
    return edges;
}
|
|
1739
|
+
/**
|
|
1740
|
+
* Builds a complete dependency graph from a list of projects.
|
|
1741
|
+
*
|
|
1742
|
+
* @param projects - List of projects to analyze
|
|
1743
|
+
* @returns Dependency graph analysis result
|
|
1744
|
+
*
|
|
1745
|
+
* @example
|
|
1746
|
+
* ```typescript
|
|
1747
|
+
* import { buildDependencyGraph, discoverPackages } from '@hyperfrontend/versioning'
|
|
1748
|
+
*
|
|
1749
|
+
* const { projects } = discoverPackages()
|
|
1750
|
+
* const analysis = buildDependencyGraph(projects)
|
|
1751
|
+
*
|
|
1752
|
+
* // Get packages that depend on 'lib-utils'
|
|
1753
|
+
* const dependents = analysis.dependencyGraph.get('lib-utils') ?? []
|
|
1754
|
+
*
|
|
1755
|
+
* // Get packages in topological order for building
|
|
1756
|
+
* const buildOrder = getTopologicalOrder(analysis)
|
|
1757
|
+
* ```
|
|
1758
|
+
*/
|
|
1759
|
+
function buildDependencyGraph(projects) {
    const packageNames = createSet(projects.map((p) => p.name));
    const edges = [];
    // Collect all edges
    for (const project of projects) {
        const projectEdges = findInternalDependenciesWithTypes(project.name, project.packageJson, packageNames);
        edges.push(...projectEdges);
    }
    // Build forward graph (dependency -> dependents)
    const dependencyGraph = createMap();
    // Seed every package so isolated packages still appear with empty lists.
    for (const name of packageNames) {
        dependencyGraph.set(name, []);
    }
    for (const edge of edges) {
        const dependents = dependencyGraph.get(edge.to);
        if (dependents) {
            dependents.push(edge.from);
        }
    }
    // Build reverse graph (package -> dependencies)
    const reverseDependencyGraph = createMap();
    for (const name of packageNames) {
        reverseDependencyGraph.set(name, []);
    }
    for (const edge of edges) {
        const deps = reverseDependencyGraph.get(edge.from);
        if (deps) {
            deps.push(edge.to);
        }
    }
    // Find leaf packages (no dependents)
    const leafPackages = [];
    for (const [name, dependents] of dependencyGraph) {
        if (dependents.length === 0) {
            leafPackages.push(name);
        }
    }
    // Find root packages (no dependencies)
    const rootPackages = [];
    for (const [name, deps] of reverseDependencyGraph) {
        if (deps.length === 0) {
            rootPackages.push(name);
        }
    }
    // Detect circular dependencies
    const { hasCircular, cycles } = detectCircularDependencies(reverseDependencyGraph);
    // Convert to readonly maps
    // Copies each adjacency list so callers cannot mutate the working maps above.
    const readonlyDependencyGraph = createMap();
    for (const [key, value] of dependencyGraph) {
        readonlyDependencyGraph.set(key, [...value]);
    }
    const readonlyReverseDependencyGraph = createMap();
    for (const [key, value] of reverseDependencyGraph) {
        readonlyReverseDependencyGraph.set(key, [...value]);
    }
    return {
        dependencyGraph: readonlyDependencyGraph,
        reverseDependencyGraph: readonlyReverseDependencyGraph,
        edges,
        leafPackages,
        rootPackages,
        hasCircularDependencies: hasCircular,
        circularDependencies: cycles,
    };
}
|
|
1824
|
+
/**
|
|
1825
|
+
* Detects circular dependencies in the graph using DFS.
|
|
1826
|
+
*
|
|
1827
|
+
* @param reverseDependencyGraph - Map of package to its dependencies
|
|
1828
|
+
* @returns Detection result with cycle information
|
|
1829
|
+
*/
|
|
1830
|
+
function detectCircularDependencies(reverseDependencyGraph) {
    const cycles = [];
    const visited = createSet();
    const recursionStack = createSet();
    // Current DFS path; used to reconstruct the cycle when a back edge is seen.
    const path = [];
    /**
     * Depth-first search that records every cycle reachable from `node`.
     *
     * The previous version declared a boolean "cycle found" return, but the
     * cycle branch never returned true, so the flag was always false — and
     * the `if (dfs(dep)) return true;` early exit it guarded would have
     * skipped the `path`/`recursionStack` cleanup had it ever fired. The
     * traversal is now explicitly void with guaranteed cleanup, preserving
     * the original observable behavior (a full traversal collecting all
     * back-edge cycles).
     *
     * @param node - Current node being visited
     */
    function dfs(node) {
        visited.add(node);
        recursionStack.add(node);
        path.push(node);
        const deps = reverseDependencyGraph.get(node) ?? [];
        for (const dep of deps) {
            if (!visited.has(dep)) {
                dfs(dep);
            }
            else if (recursionStack.has(dep)) {
                // Back edge: the slice of `path` from `dep` onward is a cycle.
                const cycleStart = path.indexOf(dep);
                const cycle = path.slice(cycleStart);
                cycle.push(dep); // Close the cycle
                cycles.push(cycle);
            }
        }
        path.pop();
        recursionStack.delete(node);
    }
    // Run DFS from every unvisited node so disconnected components are covered.
    for (const node of reverseDependencyGraph.keys()) {
        if (!visited.has(node)) {
            dfs(node);
        }
    }
    return {
        hasCircular: cycles.length > 0,
        cycles,
    };
}
|
|
1874
|
+
/**
 * Gets a topological ordering of packages for building.
 * Packages with no dependencies come first.
 *
 * @param analysis - Dependency graph analysis result
 * @returns Array of package names in build order
 * @throws {Error} If circular dependencies exist
 *
 * @example
 * ```typescript
 * const buildOrder = getTopologicalOrder(analysis)
 * for (const pkg of buildOrder) {
 *   await build(pkg)
 * }
 * ```
 */
function getTopologicalOrder(analysis) {
    if (analysis.hasCircularDependencies) {
        throw createError(`Circular dependencies detected: ${analysis.circularDependencies.map((c) => c.join(' -> ')).join(', ')}`);
    }
    const result = [];
    const inDegree = createMap();
    const adjacency = createMap();
    // Initialize in-degrees and adjacency buckets for every package
    for (const [name, deps] of analysis.reverseDependencyGraph) {
        inDegree.set(name, deps.length);
        adjacency.set(name, []);
    }
    // Build adjacency list (dependency -> dependents)
    for (const [name, deps] of analysis.reverseDependencyGraph) {
        for (const dep of deps) {
            const adj = adjacency.get(dep);
            if (adj) {
                adj.push(name);
            }
        }
    }
    // Kahn's algorithm. A moving read index replaces Array#shift(), which is
    // O(n) per dequeue and made the whole loop accidentally O(n^2).
    const queue = [...analysis.rootPackages];
    for (let head = 0; head < queue.length; head++) {
        const node = queue[head];
        result.push(node);
        const dependents = adjacency.get(node) ?? [];
        for (const dependent of dependents) {
            // Decrement once and reuse the value (was computed twice before).
            const degree = (inDegree.get(dependent) ?? 0) - 1;
            inDegree.set(dependent, degree);
            if (degree === 0) {
                queue.push(dependent);
            }
        }
    }
    return result;
}
|
|
1930
|
+
/**
 * Gets all transitive dependents of a package (direct and indirect).
 *
 * @param workspace - The workspace containing projects
 * @param packageName - Name of the package to analyze
 * @returns Set of all packages that depend on this package
 *
 * @example
 * ```typescript
 * // If lib-a depends on lib-utils and app-main depends on lib-a
 * // Then getTransitiveDependents('lib-utils') returns ['lib-a', 'app-main']
 * ```
 */
function getTransitiveDependents(workspace, packageName) {
    const dependents = createSet();
    // BFS over the dependents graph. A read index replaces Array#shift(),
    // which is O(n) per dequeue and made large traversals O(n^2).
    const queue = [packageName];
    for (let head = 0; head < queue.length; head++) {
        const current = queue[head];
        const directDependents = workspace.dependencyGraph.get(current) ?? [];
        for (const dep of directDependents) {
            if (!dependents.has(dep)) {
                dependents.add(dep);
                queue.push(dep);
            }
        }
    }
    return dependents;
}
|
|
1961
|
+
/**
 * Gets all transitive dependencies of a package (direct and indirect).
 *
 * @param workspace - The workspace containing projects
 * @param packageName - Name of the package to analyze
 * @returns Set of all packages this package depends on
 */
function getTransitiveDependencies(workspace, packageName) {
    const dependencies = createSet();
    // BFS over the dependencies graph. A read index replaces Array#shift(),
    // which is O(n) per dequeue and made large traversals O(n^2).
    const queue = [packageName];
    for (let head = 0; head < queue.length; head++) {
        const current = queue[head];
        const directDeps = workspace.reverseDependencyGraph.get(current) ?? [];
        for (const dep of directDeps) {
            if (!dependencies.has(dep)) {
                dependencies.add(dep);
                queue.push(dep);
            }
        }
    }
    return dependencies;
}
|
|
1986
|
+
/**
 * Checks if package A transitively depends on package B.
 *
 * @param workspace - The workspace containing projects
 * @param packageA - Name of the potentially dependent package
 * @param packageB - Name of the potential dependency
 * @returns True if packageA transitively depends on packageB
 */
function transitivelyDependsOn(workspace, packageA, packageB) {
    // Delegate the full closure computation, then do a set membership check.
    return getTransitiveDependencies(workspace, packageA).has(packageB);
}
|
|
1998
|
+
|
|
1999
|
+
/**
|
|
2000
|
+
* Project Model
|
|
2001
|
+
*
|
|
2002
|
+
* Represents a single project/package within a workspace.
|
|
2003
|
+
* Contains package.json data, paths, and dependency information.
|
|
2004
|
+
*/
|
|
2005
|
+
/**
 * Creates a new Project object.
 *
 * A project is publishable only when it is not marked `"private": true`
 * in its package.json and has both a name and a version.
 *
 * @param options - Project properties
 * @returns A new Project object
 */
function createProject(options) {
    const markedPrivate = options.packageJson['private'] === true;
    const hasIdentity = options.name !== undefined && options.version !== undefined;
    return {
        name: options.name,
        version: options.version,
        path: options.path,
        packageJsonPath: options.packageJsonPath,
        packageJson: options.packageJson,
        changelogPath: options.changelogPath ?? null,
        internalDependencies: options.internalDependencies ?? [],
        internalDependents: options.internalDependents ?? [],
        publishable: !markedPrivate && hasIdentity,
        private: markedPrivate,
    };
}
|
|
2027
|
+
/**
 * Checks if a project is publishable (public and has name/version).
 *
 * @param project - The project to check
 * @returns True if the project can be published
 */
function isPublishable(project) {
    const { publishable } = project;
    return publishable;
}
|
|
2036
|
+
/**
 * Checks if a project is private.
 *
 * @param project - The project to check
 * @returns True if the project is marked as private
 */
function isPrivate(project) {
    // `private` is a reserved word, so rename while destructuring.
    const { private: markedPrivate } = project;
    return markedPrivate;
}
|
|
2045
|
+
/**
 * Checks if a project has a changelog file.
 *
 * @param project - The project to check
 * @returns True if a changelog path is recorded (i.e. not null)
 */
function hasChangelog$1(project) {
    const { changelogPath } = project;
    return changelogPath !== null;
}
|
|
2054
|
+
/**
 * Checks if a project has any internal dependencies.
 *
 * @param project - The project to check
 * @returns True if project depends on other workspace packages
 */
function hasInternalDependencies(project) {
    const { internalDependencies } = project;
    return internalDependencies.length !== 0;
}
|
|
2063
|
+
/**
 * Checks if a project has any internal dependents.
 *
 * @param project - The project to check
 * @returns True if other workspace packages depend on this project
 */
function hasInternalDependents(project) {
    const { internalDependents } = project;
    return internalDependents.length !== 0;
}
|
|
2072
|
+
/**
 * Gets the dependency count (internal dependencies).
 *
 * @param project - Project instance to analyze
 * @returns Number of internal dependencies
 */
function getDependencyCount(project) {
    const { internalDependencies } = project;
    return internalDependencies.length;
}
|
|
2081
|
+
/**
 * Gets the dependent count (packages that depend on this one).
 *
 * @param project - Project instance to analyze
 * @returns Number of internal dependents
 */
function getDependentCount(project) {
    const { internalDependents } = project;
    return internalDependents.length;
}
|
|
2090
|
+
/**
 * Creates a copy of a project with updated internal dependents.
 * The original project is left untouched (shallow copy).
 *
 * @param project - The project to update
 * @param dependents - New list of internal dependents
 * @returns A new Project with updated dependents
 */
function withDependents(project, dependents) {
    const updated = { ...project };
    updated.internalDependents = dependents;
    return updated;
}
|
|
2103
|
+
/**
 * Creates a copy of a project with an added internal dependent.
 * If the dependent is already present, the original object is returned
 * unchanged (no copy is made).
 *
 * @param project - The project to update
 * @param dependent - Name of the dependent to add
 * @returns A new Project with the added dependent, or the same project
 */
function addDependent(project, dependent) {
    const current = project.internalDependents;
    if (current.includes(dependent)) {
        return project;
    }
    const internalDependents = current.concat(dependent);
    return { ...project, internalDependents };
}
|
|
2119
|
+
|
|
2120
|
+
/**
|
|
2121
|
+
* Workspace Model
|
|
2122
|
+
*
|
|
2123
|
+
* Represents a monorepo workspace with multiple projects.
|
|
2124
|
+
* Used for package discovery, dependency tracking, and coordinated versioning.
|
|
2125
|
+
*/
|
|
2126
|
+
/**
 * Default workspace discovery patterns.
 *
 * Each pattern targets a conventional monorepo top-level directory and
 * matches the package.json one level below it (the globs are intentionally
 * non-recursive: only direct children of each directory are discovered).
 */
const DEFAULT_PATTERNS = [
    'libs/*/package.json',
    'apps/*/package.json',
    'packages/*/package.json',
    'tools/*/package.json',
    'plugins/*/package.json',
];
/**
 * Default exclusion patterns.
 *
 * Skips dependency trees, build output, coverage reports, and git metadata
 * during discovery.
 */
const DEFAULT_EXCLUDE = ['**/node_modules/**', '**/dist/**', '**/coverage/**', '**/.git/**'];
/**
 * Default workspace configuration.
 *
 * NOTE: shares the DEFAULT_PATTERNS / DEFAULT_EXCLUDE arrays by reference —
 * treat the contained arrays as read-only; mutating them would change the
 * defaults for every later caller.
 */
const DEFAULT_WORKSPACE_CONFIG = {
    patterns: DEFAULT_PATTERNS,
    exclude: DEFAULT_EXCLUDE,
    includeChangelogs: true,
    trackDependencies: true,
};
|
|
2149
|
+
/**
 * Creates a new workspace configuration by merging with defaults.
 *
 * @param options - Partial configuration options (may be undefined)
 * @returns Complete workspace configuration
 */
function createWorkspaceConfig(options) {
    // Pull every fallback from DEFAULT_WORKSPACE_CONFIG instead of repeating
    // the literal values; previously includeChangelogs/trackDependencies
    // hard-coded `true` and would silently diverge if the defaults changed.
    return {
        patterns: options?.patterns ?? DEFAULT_WORKSPACE_CONFIG.patterns,
        exclude: options?.exclude ?? DEFAULT_WORKSPACE_CONFIG.exclude,
        includeChangelogs: options?.includeChangelogs ?? DEFAULT_WORKSPACE_CONFIG.includeChangelogs,
        trackDependencies: options?.trackDependencies ?? DEFAULT_WORKSPACE_CONFIG.trackDependencies,
    };
}
|
|
2163
|
+
/**
 * Creates a new workspace object.
 *
 * @param options - Workspace properties
 * @param options.root - Absolute path to workspace root directory
 * @param options.type - Type of workspace (nx, turbo, etc.)
 * @param options.projects - Map of project names to project objects
 * @param options.config - Configuration used for workspace discovery
 * @param options.dependencyGraph - Map of package names to their dependents
 * @param options.reverseDependencyGraph - Map of package names to their dependencies
 * @returns A new Workspace object (projectList is sorted by project name)
 */
function createWorkspace(options) {
    const byName = (a, b) => a.name.localeCompare(b.name);
    const projectList = Array.from(options.projects.values()).sort(byName);
    return {
        root: options.root,
        type: options.type,
        projects: options.projects,
        projectList,
        config: options.config,
        dependencyGraph: options.dependencyGraph,
        reverseDependencyGraph: options.reverseDependencyGraph,
    };
}
|
|
2187
|
+
/**
 * Gets a project by name from the workspace.
 *
 * @param workspace - Workspace to search in
 * @param projectName - Identifier of the project to retrieve
 * @returns The project or undefined if not found
 */
function getProject(workspace, projectName) {
    const { projects } = workspace;
    return projects.get(projectName);
}
|
|
2197
|
+
/**
 * Checks if a project exists in the workspace.
 *
 * @param workspace - Workspace to search in
 * @param projectName - Identifier of the project to check
 * @returns True if the project exists
 */
function hasProject(workspace, projectName) {
    const { projects } = workspace;
    return projects.has(projectName);
}
|
|
2207
|
+
/**
 * Gets all project names in the workspace.
 *
 * @param workspace - Workspace to retrieve project names from
 * @returns Array of project names (in projectList order, i.e. sorted)
 */
function getProjectNames(workspace) {
    return Array.from(workspace.projectList, (project) => project.name);
}
|
|
2216
|
+
/**
 * Gets the count of projects in the workspace.
 *
 * @param workspace - Workspace to count projects in
 * @returns Number of projects
 */
function getProjectCount(workspace) {
    const { projects } = workspace;
    return projects.size;
}
|
|
2225
|
+
/**
 * Gets projects that depend on the given project.
 *
 * @param workspace - Workspace containing the dependency graph
 * @param projectName - Name of the dependency
 * @returns Array of dependent project names (empty when unknown)
 */
function getDependents(workspace, projectName) {
    const dependents = workspace.dependencyGraph.get(projectName);
    return dependents ?? [];
}
|
|
2235
|
+
/**
 * Gets projects that the given project depends on.
 *
 * @param workspace - Workspace containing the dependency graph
 * @param projectName - Identifier of the project to look up
 * @returns Array of dependency project names (empty when unknown)
 */
function getDependencies(workspace, projectName) {
    const dependencies = workspace.reverseDependencyGraph.get(projectName);
    return dependencies ?? [];
}
|
|
2245
|
+
/**
 * Checks if projectA directly depends on projectB.
 *
 * @param workspace - Workspace containing the dependency graph
 * @param projectA - Name of the potentially dependent project
 * @param projectB - Name of the potential dependency
 * @returns True if projectA depends on projectB
 */
function dependsOn(workspace, projectA, projectB) {
    return getDependencies(workspace, projectA).includes(projectB);
}
|
|
2257
|
+
|
|
2258
|
+
/**
|
|
2259
|
+
* Changelog Discovery
|
|
2260
|
+
*
|
|
2261
|
+
* Discovers CHANGELOG.md files within workspace projects.
|
|
2262
|
+
*/
|
|
2263
|
+
/**
 * Common changelog file names in priority order.
 *
 * findProjectChangelog probes these in array order and returns the first
 * existing file, so earlier entries win when a project contains several
 * candidates.
 */
const CHANGELOG_NAMES = ['CHANGELOG.md', 'Changelog.md', 'changelog.md', 'HISTORY.md', 'CHANGES.md'];
|
|
2267
|
+
/**
 * Finds changelog files for a list of packages.
 * Returns a map of project path to changelog absolute path; packages
 * without a changelog are simply omitted from the map.
 *
 * @param workspaceRoot - Workspace root path (currently not read by this
 *   function — kept for interface stability)
 * @param packages - List of packages to find changelogs for
 * @returns Map of project path to changelog path
 */
function findChangelogs(workspaceRoot, packages) {
    const result = createMap();
    for (const pkg of packages) {
        const found = findProjectChangelog(pkg.path);
        if (found) {
            result.set(pkg.path, found);
        }
    }
    return result;
}
|
|
2285
|
+
/**
 * Finds the changelog file for a single project.
 * Checks for common changelog names in order of priority and returns the
 * first one that exists on disk.
 *
 * @param projectPath - Path to project directory
 * @returns Absolute path to changelog or null if not found
 *
 * @example
 * ```typescript
 * import { findProjectChangelog } from '@hyperfrontend/versioning'
 *
 * const changelogPath = findProjectChangelog('./libs/my-lib')
 * if (changelogPath) {
 *   console.log('Found changelog:', changelogPath)
 * }
 * ```
 */
function findProjectChangelog(projectPath) {
    const candidates = CHANGELOG_NAMES.map((name) => join$1(projectPath, name));
    const found = candidates.find((candidate) => exists(candidate));
    return found ?? null;
}
|
|
2311
|
+
/**
 * Discovers all changelog files within a workspace.
 *
 * @param workspaceRoot - Workspace root path
 * @param patterns - Glob patterns for finding changelogs (default: all CHANGELOGs)
 * @returns Array of discovered changelog information
 *
 * @example
 * ```typescript
 * import { discoverAllChangelogs } from '@hyperfrontend/versioning'
 *
 * const changelogs = discoverAllChangelogs('/path/to/workspace')
 * for (const changelog of changelogs) {
 *   console.log(`${changelog.projectPath} -> ${changelog.path}`)
 * }
 * ```
 */
function discoverAllChangelogs(workspaceRoot, patterns = ['**/CHANGELOG.md', '**/Changelog.md', '**/changelog.md']) {
    const files = findFiles(workspaceRoot, [...patterns], {
        ignorePatterns: ['**/node_modules/**', '**/dist/**'],
        absolutePaths: false,
    });
    // Map each relative match to a descriptor with absolute/project paths.
    return files.map((relativePath) => {
        const absolutePath = join$1(workspaceRoot, relativePath);
        return {
            path: absolutePath,
            relativePath,
            projectPath: dirname(absolutePath),
            filename: relativePath.split('/').pop() ?? 'CHANGELOG.md',
        };
    });
}
|
|
2347
|
+
|
|
2348
|
+
/**
|
|
2349
|
+
* Package Discovery
|
|
2350
|
+
*
|
|
2351
|
+
* Discovers packages within a workspace by finding package.json files
|
|
2352
|
+
* and extracting relevant metadata. Uses project-scope for file operations.
|
|
2353
|
+
*/
|
|
2354
|
+
/**
 * Discovers all packages within a workspace.
 * Finds package.json files, parses them, and optionally discovers
 * changelogs and internal dependencies.
 *
 * @param options - Discovery options
 * @returns Discovery result with all found packages
 * @throws {Error} If workspace root cannot be found
 *
 * @example
 * ```typescript
 * import { discoverPackages } from '@hyperfrontend/versioning'
 *
 * // Discover all packages in current workspace
 * const result = discoverPackages()
 *
 * // Discover with custom patterns
 * const result = discoverPackages({
 *   patterns: ['packages/*\/package.json'],
 *   includeChangelogs: true
 * })
 *
 * // Access discovered projects
 * for (const project of result.projects) {
 *   console.log(`${project.name}@${project.version}`)
 * }
 * ```
 */
function discoverPackages(options = {}) {
    // Resolve workspace root
    const workspaceRoot = options.workspaceRoot ?? findWorkspaceRoot(process.cwd());
    if (!workspaceRoot) {
        throw createError('Could not find workspace root. Ensure you are in a monorepo with nx.json, turbo.json, or workspaces field.');
    }
    // Build configuration via the shared factory so the default-merging logic
    // lives in exactly one place (it was previously duplicated inline here).
    const config = createWorkspaceConfig(options);
    // Find all package.json files
    const packageJsonPaths = findPackageJsonFiles(workspaceRoot, config);
    // Parse package.json files
    const rawPackages = parsePackageJsonFiles(workspaceRoot, packageJsonPaths);
    // Collect all package names for internal dependency detection
    const packageNames = createSet(rawPackages.map((p) => p.name));
    // Find changelogs if requested
    const changelogMap = config.includeChangelogs ? findChangelogs(workspaceRoot, rawPackages) : createMap();
    // Attach changelog paths (null when none was found for the project)
    const rawWithChangelogs = rawPackages.map((pkg) => ({
        ...pkg,
        changelogPath: changelogMap.get(pkg.path) ?? null,
    }));
    // Calculate internal dependencies when tracking is enabled
    const projects = config.trackDependencies
        ? buildProjectsWithDependencies(rawWithChangelogs, packageNames)
        : rawWithChangelogs.map((pkg) => createProject(pkg));
    // Build project map keyed by package name
    const projectMap = createMap();
    for (const project of projects) {
        projectMap.set(project.name, project);
    }
    return {
        projects,
        projectMap,
        packageNames,
        workspaceRoot,
        config,
    };
}
|
|
2425
|
+
/**
 * Finds all package.json files matching the configured patterns.
 *
 * @param workspaceRoot - Root directory to search from
 * @param config - Workspace configuration (patterns and exclusions)
 * @returns Array of relative paths to package.json files
 */
function findPackageJsonFiles(workspaceRoot, config) {
    // Copy the pattern arrays so findFiles never mutates shared defaults.
    return findFiles(workspaceRoot, [...config.patterns], {
        ignorePatterns: [...config.exclude],
    });
}
|
|
2439
|
+
/**
 * Parses package.json files and extracts metadata.
 * Unreadable/invalid manifests and manifests without a name are skipped.
 *
 * @param workspaceRoot - Workspace root path
 * @param packageJsonPaths - Relative paths to package.json files
 * @returns Array of raw package info objects
 */
function parsePackageJsonFiles(workspaceRoot, packageJsonPaths) {
    const parsed = [];
    for (const relPath of packageJsonPaths) {
        const absPath = join$1(workspaceRoot, relPath);
        try {
            const manifest = readPackageJson(absPath);
            // Skip packages without a name — they cannot be addressed
            if (!manifest.name) {
                continue;
            }
            parsed.push({
                name: manifest.name,
                version: manifest.version ?? '0.0.0',
                path: dirname(absPath),
                packageJsonPath: absPath,
                packageJson: manifest,
                changelogPath: null,
            });
        }
        catch {
            // Skip packages that can't be parsed
        }
    }
    return parsed;
}
|
|
2473
|
+
/**
 * Builds projects with internal dependency information.
 *
 * @param rawPackages - Raw package info objects
 * @param packageNames - Set of all package names
 * @returns Array of Project objects with dependencies and dependents populated
 */
function buildProjectsWithDependencies(rawPackages, packageNames) {
    // First pass: attach each package's internal dependencies
    const withDeps = rawPackages.map((pkg) => ({
        ...pkg,
        internalDependencies: findInternalDependencies(pkg.packageJson, packageNames),
    }));
    // Invert the edges into a dependency -> dependents map
    const dependentsMap = createMap();
    for (const pkg of withDeps) {
        for (const dep of pkg.internalDependencies) {
            const bucket = dependentsMap.get(dep) ?? [];
            bucket.push(pkg.name);
            dependentsMap.set(dep, bucket);
        }
    }
    // Second pass: materialize projects with their dependents attached
    return withDeps.map((pkg) => createProject({
        ...pkg,
        internalDependents: dependentsMap.get(pkg.name) ?? [],
    }));
}
|
|
2508
|
+
/**
 * Discovers a single project by path.
 * Accepts either a project directory or a direct path to a package.json.
 *
 * @param projectPath - Path to project directory or package.json
 * @returns The discovered project or null if not found / unreadable / unnamed
 */
function discoverProject(projectPath) {
    const pointsAtManifest = projectPath.endsWith('package.json');
    const packageJsonPath = pointsAtManifest ? projectPath : join$1(projectPath, 'package.json');
    const projectDir = pointsAtManifest ? dirname(projectPath) : projectPath;
    try {
        const packageJson = readPackageJson(packageJsonPath);
        if (!packageJson.name) {
            return null;
        }
        return createProject({
            name: packageJson.name,
            version: packageJson.version ?? '0.0.0',
            path: projectDir,
            packageJsonPath,
            packageJson,
            changelogPath: null,
        });
    }
    catch {
        // Missing or malformed manifest — treat as "not a project"
        return null;
    }
}
|
|
2535
|
+
/**
 * Discovers a project by name within a workspace.
 * Runs a full package discovery, then looks the project up by name.
 *
 * @param projectName - Name of the project to find
 * @param options - Discovery options
 * @returns The project or null if not found
 */
function discoverProjectByName(projectName, options = {}) {
    const { projectMap } = discoverPackages(options);
    return projectMap.get(projectName) ?? null;
}
|
|
2546
|
+
|
|
2547
|
+
/**
|
|
2548
|
+
* Changelog Path Utilities
|
|
2549
|
+
*
|
|
2550
|
+
* Functions for checking changelog existence and resolving expected paths.
|
|
2551
|
+
*/
|
|
2552
|
+
/**
 * Checks if a project has a changelog file.
 *
 * @param projectPath - Directory containing the project to check
 * @returns True if changelog exists
 */
function hasChangelog(projectPath) {
    const changelog = findProjectChangelog(projectPath);
    return changelog !== null;
}
|
|
2561
|
+
/**
 * Gets the expected changelog path for a project.
 * Returns the standard CHANGELOG.md path regardless of whether it exists.
 *
 * @param projectPath - Directory containing the project files
 * @returns Absolute path to CHANGELOG.md in the project directory
 */
function getExpectedChangelogPath(projectPath) {
    const filename = 'CHANGELOG.md';
    return join$1(projectPath, filename);
}
|
|
2571
|
+
|
|
2572
|
+
/**
 * Converts a SemVer to its canonical string representation.
 *
 * @param version - The version to format
 * @returns The version string (e.g., "1.2.3-alpha.1+build.123")
 */
function format(version) {
    const core = `${version.major}.${version.minor}.${version.patch}`;
    const pre = version.prerelease.length > 0 ? `-${version.prerelease.join('.')}` : '';
    const meta = version.build.length > 0 ? `+${version.build.join('.')}` : '';
    return `${core}${pre}${meta}`;
}
|
|
2588
|
+
|
|
2589
|
+
/**
 * Creates a new SemVer object.
 * Missing prerelease/build arrays default to empty; `raw` is passed through
 * unchanged (may be undefined).
 *
 * @param options - Version components
 * @returns A new SemVer object
 */
function createSemVer(options) {
    const prerelease = options.prerelease ?? [];
    const build = options.build ?? [];
    return {
        major: options.major,
        minor: options.minor,
        patch: options.patch,
        prerelease,
        build,
        raw: options.raw,
    };
}
|
|
2605
|
+
|
|
2606
|
+
/**
 * Increments a version based on the bump type.
 *
 * @param version - The version to increment
 * @param type - The type of bump (major, minor, patch, premajor, preminor,
 *   prepatch, prerelease, none)
 * @param prereleaseId - Optional prerelease identifier for prerelease bumps
 *   (defaults to 'alpha')
 * @returns A new incremented SemVer (the input version for 'none'/unknown)
 *
 * @example
 * increment(parseVersion('1.2.3'), 'minor') // 1.3.0
 * increment(parseVersion('1.2.3'), 'major') // 2.0.0
 * increment(parseVersion('1.2.3'), 'prerelease', 'alpha') // 1.2.4-alpha.0
 */
function increment(version, type, prereleaseId) {
    // Small local factories: a stable release (no prerelease/build) and a
    // fresh prerelease seeded at <id>.0.
    const release = (major, minor, patch) => createSemVer({ major, minor, patch, prerelease: [], build: [] });
    const preSeed = (major, minor, patch) => createSemVer({ major, minor, patch, prerelease: [prereleaseId ?? 'alpha', '0'], build: [] });
    switch (type) {
        case 'major':
            return release(version.major + 1, 0, 0);
        case 'minor':
            return release(version.major, version.minor + 1, 0);
        case 'patch':
            // A prerelease "graduates" instead of bumping: 1.2.3-alpha -> 1.2.3
            if (version.prerelease.length > 0) {
                return release(version.major, version.minor, version.patch);
            }
            return release(version.major, version.minor, version.patch + 1);
        case 'premajor':
            return preSeed(version.major + 1, 0, 0);
        case 'preminor':
            return preSeed(version.major, version.minor + 1, 0);
        case 'prepatch':
            return preSeed(version.major, version.minor, version.patch + 1);
        case 'prerelease':
            return incrementPrerelease(version, prereleaseId);
        case 'none':
        default:
            return version;
    }
}
|
|
2686
|
+
/**
 * Increments the prerelease portion of a version.
 *
 * Behavior follows the SemVer conventions used by node-semver:
 * - 1.2.3         + 'alpha' -> 1.2.4-alpha.0 (no prerelease yet)
 * - 1.2.3-alpha.4 (same id) -> 1.2.3-alpha.5 (numeric tail incremented)
 * - 1.2.3-alpha   (same id) -> 1.2.3-alpha.0 (numeric tail appended)
 * - 1.2.3-alpha.4 + 'beta'  -> 1.2.3-beta.0  (new id restarts at 0)
 *
 * @param version - The version to increment (never mutated)
 * @param id - Optional prerelease identifier (defaults to 'alpha' when a
 *   fresh prerelease sequence is started)
 * @returns A new version with incremented prerelease
 */
function incrementPrerelease(version, id) {
    const { major, minor, patch } = version;
    // No existing prerelease: bump patch and start a fresh sequence at 0.
    if (version.prerelease.length === 0) {
        return createSemVer({
            major,
            minor,
            patch: patch + 1,
            prerelease: [id ?? 'alpha', '0'],
            build: [],
        });
    }
    // Switching to a different identifier restarts the sequence at 0.
    // Restarting unconditionally also fixes the purely numeric case
    // (1.2.3-4 + 'beta' now yields 1.2.3-beta.0, not 1.2.3-beta).
    if (id && version.prerelease[0] !== id) {
        return createSemVer({
            major,
            minor,
            patch,
            prerelease: [id, '0'],
            build: [],
        });
    }
    const prerelease = [...version.prerelease];
    const lastIdx = prerelease.length - 1;
    const last = prerelease[lastIdx];
    // Only a fully numeric identifier is incremented. parseInt alone would
    // misclassify mixed identifiers like "1b" as numeric and drop the "b".
    if (/^\d+$/.test(last)) {
        prerelease[lastIdx] = String(Number(last) + 1);
    }
    else {
        // Non-numeric tail: append a numeric counter starting at 0.
        prerelease.push('0');
    }
    return createSemVer({
        major,
        minor,
        patch,
        prerelease,
        build: [],
    });
}
|
|
2733
|
+
|
|
2734
|
+
/**
 * Maximum accepted version-string length. parseVersion rejects longer input
 * up front so adversarial strings cannot exhaust memory or CPU.
 */
const MAX_VERSION_LENGTH = 256;
|
|
2738
|
+
/**
 * Parses a semantic version string.
 *
 * Accepts versions in the format: MAJOR.MINOR.PATCH[-prerelease][+build]
 * Optional leading 'v' or '=' prefixes are stripped, as is surrounding
 * whitespace. Input longer than MAX_VERSION_LENGTH is rejected up front.
 *
 * Parsing walks the string by char codes with a cursor (no regex),
 * treating [pos, end) as the still-unconsumed slice of the input.
 *
 * @param input - The version string to parse
 * @returns A ParseVersionResult: `{ success: true, version }` on success,
 *   or `{ success: false, error }` with a human-readable message
 *
 * @example
 * parseVersion('1.2.3') // { success: true, version: { major: 1, minor: 2, patch: 3, ... } }
 * parseVersion('v1.0.0-alpha.1+build.123') // { success: true, ... }
 * parseVersion('invalid') // { success: false, error: '...' }
 */
function parseVersion(input) {
    // Input validation: reject non-strings and the empty string.
    if (!input || typeof input !== 'string') {
        return { success: false, error: 'Version string is required' };
    }
    // Length cap guards against adversarial, memory-exhausting input.
    if (input.length > MAX_VERSION_LENGTH) {
        return { success: false, error: `Version string exceeds maximum length of ${MAX_VERSION_LENGTH}` };
    }
    // Strip leading whitespace
    let pos = 0;
    while (pos < input.length && isWhitespace(input.charCodeAt(pos))) {
        pos++;
    }
    // Strip trailing whitespace
    let end = input.length;
    while (end > pos && isWhitespace(input.charCodeAt(end - 1))) {
        end--;
    }
    // Strip optional leading 'v' or '='
    if (pos < end) {
        const code = input.charCodeAt(pos);
        if (code === 118 || code === 86) {
            // 'v' or 'V'
            pos++;
        }
        else if (code === 61) {
            // '='
            pos++;
        }
    }
    // Parse major version
    const majorResult = parseNumericIdentifier(input, pos, end);
    if (!majorResult.success) {
        return { success: false, error: majorResult.error ?? 'Invalid major version' };
    }
    pos = majorResult.endPos;
    // Expect dot (46 = '.')
    if (pos >= end || input.charCodeAt(pos) !== 46) {
        // '.'
        return { success: false, error: 'Expected "." after major version' };
    }
    pos++;
    // Parse minor version
    const minorResult = parseNumericIdentifier(input, pos, end);
    if (!minorResult.success) {
        return { success: false, error: minorResult.error ?? 'Invalid minor version' };
    }
    pos = minorResult.endPos;
    // Expect dot (46 = '.')
    if (pos >= end || input.charCodeAt(pos) !== 46) {
        // '.'
        return { success: false, error: 'Expected "." after minor version' };
    }
    pos++;
    // Parse patch version
    const patchResult = parseNumericIdentifier(input, pos, end);
    if (!patchResult.success) {
        return { success: false, error: patchResult.error ?? 'Invalid patch version' };
    }
    pos = patchResult.endPos;
    // Parse optional prerelease (introduced by 45 = '-')
    const prerelease = [];
    if (pos < end && input.charCodeAt(pos) === 45) {
        // '-'
        pos++;
        const prereleaseResult = parseIdentifiers(input, pos, end, [43]); // Stop at '+'
        if (!prereleaseResult.success) {
            return { success: false, error: prereleaseResult.error ?? 'Invalid prerelease' };
        }
        prerelease.push(...prereleaseResult.identifiers);
        pos = prereleaseResult.endPos;
    }
    // Parse optional build metadata (introduced by 43 = '+'); runs to end of input
    const build = [];
    if (pos < end && input.charCodeAt(pos) === 43) {
        // '+'
        pos++;
        const buildResult = parseIdentifiers(input, pos, end, []);
        if (!buildResult.success) {
            return { success: false, error: buildResult.error ?? 'Invalid build metadata' };
        }
        build.push(...buildResult.identifiers);
        pos = buildResult.endPos;
    }
    // Check for trailing characters
    if (pos < end) {
        return { success: false, error: `Unexpected character at position ${pos}: "${input[pos]}"` };
    }
    return {
        success: true,
        // Note: `raw` keeps the ORIGINAL input, including any whitespace
        // and 'v'/'=' prefix that was stripped for parsing.
        version: createSemVer({
            major: majorResult.value,
            minor: minorResult.value,
            patch: patchResult.value,
            prerelease,
            build,
            raw: input,
        }),
    };
}
|
|
2852
|
+
/**
 * Parses a numeric identifier (non-negative integer, no leading zeros
 * except for the single digit "0").
 *
 * @param input - Input string to parse
 * @param start - Start position in the input
 * @param end - End position (exclusive) in the input
 * @returns `{ success, value, endPos }`, plus an `error` message on failure
 */
function parseNumericIdentifier(input, start, end) {
    const fail = (endPos, error) => ({ success: false, value: 0, endPos, error });
    if (start >= end) {
        return fail(start, 'Expected numeric identifier');
    }
    const firstCode = input.charCodeAt(start);
    if (!isDigit(firstCode)) {
        return fail(start, 'Expected digit');
    }
    // "0" alone is valid, but "01", "007", ... are not (48 = '0').
    if (firstCode === 48 && start + 1 < end && isDigit(input.charCodeAt(start + 1))) {
        return fail(start, 'Numeric identifier cannot have leading zeros');
    }
    let cursor = start;
    let value = 0;
    while (cursor < end && isDigit(input.charCodeAt(cursor))) {
        value = value * 10 + (input.charCodeAt(cursor) - 48);
        cursor += 1;
        // Reject values beyond exact double-precision integer range.
        if (value > Number.MAX_SAFE_INTEGER) {
            return fail(cursor, 'Numeric identifier is too large');
        }
    }
    return { success: true, value, endPos: cursor };
}
|
|
2886
|
+
/**
 * Parses dot-separated identifiers (for prerelease/build sections).
 *
 * @param input - Input string to parse
 * @param start - Start position in the input
 * @param end - End position (exclusive) in the input
 * @param stopCodes - Character codes that terminate the identifier list
 * @returns `{ success, identifiers, endPos }`, plus an `error` on failure
 */
function parseIdentifiers(input, start, end, stopCodes) {
    const DOT = 46;
    const HYPHEN = 45;
    const identifiers = [];
    let cursor = start;
    while (cursor < end && !stopCodes.includes(input.charCodeAt(cursor))) {
        // Scan one dot-free identifier made of [0-9A-Za-z-].
        const from = cursor;
        while (cursor < end) {
            const code = input.charCodeAt(cursor);
            if (code === DOT || stopCodes.includes(code)) {
                break;
            }
            if (!isAlphanumeric(code) && code !== HYPHEN) {
                return { success: false, identifiers: [], endPos: cursor, error: `Invalid character in identifier: "${input[cursor]}"` };
            }
            cursor += 1;
        }
        if (cursor === from) {
            return { success: false, identifiers: [], endPos: cursor, error: 'Empty identifier' };
        }
        identifiers.push(input.slice(from, cursor));
        // Step over a dot separator; a dot must be followed by another identifier.
        if (cursor < end && input.charCodeAt(cursor) === DOT) {
            cursor += 1;
            if (cursor >= end || stopCodes.includes(input.charCodeAt(cursor))) {
                return { success: false, identifiers: [], endPos: cursor, error: 'Identifier expected after dot' };
            }
        }
    }
    return { success: true, identifiers, endPos: cursor };
}
|
|
2933
|
+
/**
 * Tests whether a UTF-16 code unit is an ASCII digit ('0'-'9').
 *
 * @param code - Character code to check
 * @returns True if the code represents a digit
 */
function isDigit(code) {
    return !(code < 48 || code > 57);
}
|
|
2942
|
+
/**
 * Tests whether a character code is an ASCII letter or digit.
 *
 * Note: hyphen (code 45) is NOT matched here even though it is legal inside
 * semver identifiers; callers that allow hyphens check code 45 separately.
 *
 * @param code - Character code to check
 * @returns True if the code is in 0-9, A-Z, or a-z
 */
function isAlphanumeric(code) {
    const inRange = (lo, hi) => code >= lo && code <= hi;
    return inRange(48, 57) || inRange(65, 90) || inRange(97, 122);
}
|
|
2954
|
+
/**
 * Tests whether a character code is whitespace (space, tab, LF, or CR).
 *
 * @param code - Character code to check
 * @returns True if the code represents whitespace
 */
function isWhitespace(code) {
    switch (code) {
        case 9: // tab
        case 10: // LF
        case 13: // CR
        case 32: // space
            return true;
        default:
            return false;
    }
}
|
|
2963
|
+
|
|
2964
|
+
/**
 * Cascade Bump
 *
 * Calculates which packages need version bumps when dependencies change.
 * Implements cascade versioning for monorepos where dependents may need
 * to be bumped when their dependencies are updated.
 */
/**
 * Default cascade bump options.
 *
 * - cascadeBumpType: bump type applied to dependents of a changed package
 * - includeDevDependencies: whether devDependencies links propagate bumps
 * - includePeerDependencies: whether peerDependencies links propagate bumps
 * - prereleaseId: identifier used when a bump produces a prerelease version
 */
const DEFAULT_CASCADE_OPTIONS = {
    cascadeBumpType: 'patch',
    includeDevDependencies: false,
    includePeerDependencies: true,
    prereleaseId: 'alpha',
};
|
|
2980
|
+
/**
 * Calculates cascade bumps for a workspace given direct bumps.
 *
 * When packages are directly bumped (e.g., due to commits), their dependents
 * may also need version bumps. This function plans bumps for every affected
 * package via a breadth-first walk of the dependents graph: directly bumped
 * packages are seeded into a queue, and each dependent that qualifies (see
 * shouldCascade) receives a cascade bump and is queued in turn.
 *
 * @param workspace - Workspace containing projects and dependency graph
 * @param directBumps - Packages with direct changes
 * @param options - Configuration for cascade bump calculation
 * @returns Cascade bump result
 *
 * @example
 * ```typescript
 * import { calculateCascadeBumps } from '@hyperfrontend/versioning'
 *
 * // If lib-utils is getting a minor bump
 * const result = calculateCascadeBumps(workspace, [
 *   { name: 'lib-utils', bumpType: 'minor' }
 * ])
 *
 * // result.bumps includes lib-utils and all packages that depend on it
 * for (const bump of result.bumps) {
 *   console.log(`${bump.name}: ${bump.currentVersion} -> ${bump.nextVersion}`)
 * }
 * ```
 */
function calculateCascadeBumps(workspace, directBumps, options = {}) {
    const opts = { ...DEFAULT_CASCADE_OPTIONS, ...options };
    const directBumpMap = createMap(directBumps.map((b) => [b.name, b]));
    // All planned bumps keyed by package name (direct and cascade).
    const allBumps = createMap();
    // Process direct bumps first; names not present in the workspace are
    // silently skipped.
    for (const input of directBumps) {
        const project = workspace.projects.get(input.name);
        if (!project) {
            continue;
        }
        const planned = createPlannedBump(project, input.bumpType, 'direct', [], opts.prereleaseId);
        allBumps.set(input.name, planned);
    }
    // Calculate cascade bumps with a BFS over dependents.
    // NOTE(review): queue.shift() is O(n) per pop; fine for typical workspace
    // sizes, but an index cursor would avoid quadratic behavior on huge graphs.
    const processed = createSet();
    const queue = [...directBumps.map((b) => b.name)];
    while (queue.length > 0) {
        const current = queue.shift();
        if (current === undefined || processed.has(current)) {
            continue;
        }
        processed.add(current);
        // Get dependents
        const dependents = getTransitiveDependents(workspace, current);
        for (const depName of dependents) {
            // Skip if already has a direct bump
            if (directBumpMap.has(depName)) {
                continue;
            }
            // Skip if already planned
            if (allBumps.has(depName)) {
                // Already planned via another trigger: only record the extra
                // trigger; the bump type itself is not revisited.
                const existing = allBumps.get(depName);
                if (existing && !existing.triggeredBy.includes(current)) {
                    allBumps.set(depName, {
                        ...existing,
                        triggeredBy: [...existing.triggeredBy, current],
                    });
                }
                continue;
            }
            const project = workspace.projects.get(depName);
            if (!project) {
                continue;
            }
            // Check if we should include this dependent based on dependency type
            if (!shouldCascade(workspace, depName, current, opts)) {
                continue;
            }
            const planned = createPlannedBump(project, opts.cascadeBumpType, 'cascade', [current], opts.prereleaseId);
            allBumps.set(depName, planned);
            // Continue cascading from this dependent
            queue.push(depName);
        }
    }
    // Convert to arrays and categorize. The filtered views are captured
    // BEFORE the sort below, so they keep planning (insertion) order while
    // `bumps` itself is name-sorted.
    const bumps = [...allBumps.values()];
    const directBumpsArray = bumps.filter((b) => b.reason === 'direct');
    const cascadeBumpsArray = bumps.filter((b) => b.reason === 'cascade');
    // Sort bumps by name for consistent output
    bumps.sort((a, b) => a.name.localeCompare(b.name));
    return {
        bumps,
        directBumps: directBumpsArray,
        cascadeBumps: cascadeBumpsArray,
        totalAffected: bumps.length,
    };
}
|
|
3074
|
+
/**
 * Determines whether a version bump should propagate from a dependency to a
 * dependent, based on which dependency section links them and the configured
 * options. Production dependencies always cascade; dev and peer dependencies
 * cascade only when enabled in opts.
 *
 * @param workspace - Workspace containing the project
 * @param dependent - Name of the dependent package
 * @param dependency - Name of the dependency being bumped
 * @param opts - Cascade bump options
 * @returns True if the bump should cascade to this dependent
 */
function shouldCascade(workspace, dependent, dependency, opts) {
    const project = workspace.projects.get(dependent);
    if (!project) {
        return false;
    }
    const { dependencies, devDependencies, peerDependencies } = project.packageJson;
    return Boolean(dependencies?.[dependency] ||
        (opts.includeDevDependencies && devDependencies?.[dependency]) ||
        (opts.includePeerDependencies && peerDependencies?.[dependency]));
}
|
|
3103
|
+
/**
 * Creates a planned bump record for a project, computing the target version
 * from its current version and the requested bump type.
 *
 * @param project - Project to create a bump plan for
 * @param bumpType - Type of version bump to apply
 * @param reason - Reason for the bump (direct, cascade, or sync)
 * @param triggeredBy - Packages whose bumps triggered this one
 * @param prereleaseId - Optional prerelease identifier
 * @returns Planned bump object with current and next version strings
 * @throws If the project's current version string does not parse
 */
function createPlannedBump(project, bumpType, reason, triggeredBy, prereleaseId) {
    const { name, version } = project;
    const parsed = parseVersion(version);
    if (!(parsed.success && parsed.version)) {
        throw createError(`Invalid version for ${name}: ${version}`);
    }
    const bumped = computeNextVersion(parsed.version, bumpType, prereleaseId);
    return {
        name,
        currentVersion: version,
        nextVersion: format(bumped),
        bumpType,
        reason,
        triggeredBy,
    };
}
|
|
3128
|
+
/**
 * Computes the next version for a bump type; 'none' returns the input
 * version unchanged, everything else delegates to increment().
 *
 * @param current - Current semantic version
 * @param bumpType - Type of version bump to apply
 * @param prereleaseId - Optional prerelease identifier
 * @returns New semantic version after the bump
 */
function computeNextVersion(current, bumpType, prereleaseId) {
    return bumpType === 'none'
        ? current
        : increment(current, bumpType, prereleaseId);
}
|
|
3142
|
+
/**
 * Convenience wrapper: runs the cascade calculation seeded with a single
 * directly-changed package.
 *
 * @param workspace - Workspace containing projects and dependency graph
 * @param packageName - Package with direct changes
 * @param bumpType - Type of bump for the direct change
 * @param options - Configuration for cascade behavior
 * @returns Cascade bump result
 */
function calculateCascadeBumpsFromPackage(workspace, packageName, bumpType, options = {}) {
    const seed = [{ name: packageName, bumpType }];
    return calculateCascadeBumps(workspace, seed, options);
}
|
|
3154
|
+
/**
 * Renders a human-readable, multi-line summary of a cascade bump
 * calculation: counts of direct/cascade bumps followed by one line per
 * planned bump (with its triggers when cascaded).
 *
 * @param result - Result object from cascade bump calculation
 * @returns Human-readable summary
 */
function summarizeCascadeBumps(result) {
    if (result.totalAffected === 0) {
        return 'No packages affected';
    }
    const describeBump = (bump) => {
        const trigger = bump.reason === 'cascade' ? ` (triggered by ${bump.triggeredBy.join(', ')})` : '';
        return ` ${bump.name}: ${bump.currentVersion} -> ${bump.nextVersion} [${bump.bumpType}]${trigger}`;
    };
    const header = [
        `${result.totalAffected} package(s) affected:`,
        ` - ${result.directBumps.length} direct bump(s)`,
        ` - ${result.cascadeBumps.length} cascade bump(s)`,
        '',
        'Planned bumps:',
    ];
    return [...header, ...result.bumps.map(describeBump)].join('\n');
}
|
|
3176
|
+
|
|
3177
|
+
/**
 * Batch Update
 *
 * Utilities for updating multiple packages at once.
 * Supports updating versions, dependencies, and other package.json fields.
 */
/**
 * Default batch update options.
 *
 * - dryRun: when true, no files are written; the result only reports what
 *   would have changed
 * - updateDependencyReferences: when true, dependency ranges that point at
 *   bumped workspace packages are rewritten to the new versions
 */
const DEFAULT_BATCH_UPDATE_OPTIONS = {
    dryRun: false,
    updateDependencyReferences: true,
};
|
|
3190
|
+
/**
 * Applies planned version bumps to the workspace by rewriting each affected
 * package.json, then (unless dry-running) refreshing dependency ranges that
 * point at the bumped packages.
 *
 * @param workspace - Workspace containing projects to update
 * @param bumps - Planned version bumps
 * @param options - Update options (dryRun, updateDependencyReferences)
 * @returns Batch update result: updated/failed lists, total, success flag
 *
 * @example
 * ```typescript
 * import { applyBumps, calculateCascadeBumps } from '@hyperfrontend/versioning'
 *
 * const cascadeResult = calculateCascadeBumps(workspace, directBumps)
 * const updateResult = applyBumps(workspace, cascadeResult.bumps)
 *
 * if (updateResult.success) {
 *   console.log(`Updated ${updateResult.updated.length} packages`)
 * } else {
 *   console.error('Some updates failed:', updateResult.failed)
 * }
 * ```
 */
function applyBumps(workspace, bumps, options = {}) {
    const opts = { ...DEFAULT_BATCH_UPDATE_OPTIONS, ...options };
    // Name -> next version, used for rewriting dependency ranges below.
    const versionUpdates = createMap();
    for (const { name, nextVersion } of bumps) {
        versionUpdates.set(name, nextVersion);
    }
    const updated = [];
    const failed = [];
    for (const bump of bumps) {
        const project = workspace.projects.get(bump.name);
        if (!project) {
            failed.push({
                name: bump.name,
                packageJsonPath: '',
                error: 'Project not found in workspace',
            });
            continue;
        }
        try {
            // In dry-run mode the write is skipped but the bump is still
            // reported as updated.
            if (!opts.dryRun) {
                updatePackageVersion(project.packageJsonPath, bump.nextVersion);
            }
            updated.push({
                name: bump.name,
                packageJsonPath: project.packageJsonPath,
                previousVersion: bump.currentVersion,
                newVersion: bump.nextVersion,
            });
        }
        catch (error) {
            failed.push({
                name: bump.name,
                packageJsonPath: project.packageJsonPath,
                error: error instanceof Error ? error.message : String(error),
            });
        }
    }
    // Rewrite dependency ranges across the whole workspace if requested.
    if (opts.updateDependencyReferences && !opts.dryRun) {
        for (const { packageJsonPath } of workspace.projectList) {
            try {
                updateDependencyReferences(packageJsonPath, versionUpdates);
            }
            catch {
                // Dependency reference updates are best-effort by design.
            }
        }
    }
    return {
        updated,
        failed,
        total: bumps.length,
        success: failed.length === 0,
    };
}
|
|
3269
|
+
/**
 * Rewrites the version field in a package.json file on disk, re-serialized
 * with 2-space indentation and a trailing newline.
 *
 * @param packageJsonPath - Path to package.json
 * @param newVersion - New version string
 */
function updatePackageVersion(packageJsonPath, newVersion) {
    const manifest = parse(readFileContent(packageJsonPath));
    manifest.version = newVersion;
    writeFileContent(packageJsonPath, `${stringify(manifest, null, 2)}\n`);
}
|
|
3282
|
+
/**
 * Rewrites dependency ranges in a package.json so they point at new versions,
 * preserving each range's operator prefix (^, ~, >=, ...). Scans all four
 * dependency sections and writes the file only when at least one range
 * actually changed.
 *
 * @param packageJsonPath - Path to package.json
 * @param versionUpdates - Map of package name to new version
 */
function updateDependencyReferences(packageJsonPath, versionUpdates) {
    const manifest = parse(readFileContent(packageJsonPath));
    let dirty = false;
    for (const section of ['dependencies', 'devDependencies', 'peerDependencies', 'optionalDependencies']) {
        const deps = manifest[section];
        if (!deps) {
            continue;
        }
        for (const [name, nextVersion] of versionUpdates) {
            const range = deps[name];
            if (range) {
                // Keep the existing operator (^, ~, >=, ...) on the new version.
                deps[name] = extractVersionPrefix(range) + nextVersion;
                dirty = true;
            }
        }
    }
    if (dirty) {
        writeFileContent(packageJsonPath, `${stringify(manifest, null, 2)}\n`);
    }
}
|
|
3312
|
+
/**
 * Extracts the range operator prefix from a version range string.
 *
 * @param versionRange - Version range string (e.g. '^1.2.3', '>=2.0.0')
 * @returns The prefix ('^', '~', '>=', '>', '<=', '<', '=') or '' when the
 *   range starts with none of them
 */
function extractVersionPrefix(versionRange) {
    // Two-character operators are listed before their one-character
    // prefixes so '>=' is never reported as '>'.
    const OPERATORS = ['>=', '<=', '^', '~', '>', '<', '='];
    return OPERATORS.find((op) => versionRange.startsWith(op)) ?? '';
}
|
|
3335
|
+
/**
 * Rewrites the version field of a package.json stored in a VFS Tree,
 * re-serialized with 2-space indentation and a trailing newline.
 *
 * @param tree - Virtual file system tree
 * @param packageJsonPath - Relative path to package.json
 * @param newVersion - New version string
 * @throws If the file cannot be read from the tree
 */
function updatePackageVersionInTree(tree, packageJsonPath, newVersion) {
    const raw = tree.read(packageJsonPath, 'utf-8');
    if (!raw) {
        throw createError(`Could not read ${packageJsonPath}`);
    }
    const manifest = parse(raw);
    manifest.version = newVersion;
    tree.write(packageJsonPath, `${stringify(manifest, null, 2)}\n`);
}
|
|
3352
|
+
/**
 * Renders a human-readable, multi-line summary of a batch update: a status
 * header followed by per-package sections for successful and failed updates.
 *
 * @param result - Result object from batch update operation
 * @returns Human-readable summary
 */
function summarizeBatchUpdate(result) {
    const { updated, failed, total, success } = result;
    const header = success
        ? [`Successfully updated ${updated.length} package(s)`]
        : [`Updated ${updated.length}/${total} package(s)`, `Failed: ${failed.length} package(s)`];
    const updatedSection = updated.length > 0
        ? ['', 'Updated packages:', ...updated.map((pkg) => ` ${pkg.name}: ${pkg.previousVersion} -> ${pkg.newVersion}`)]
        : [];
    const failedSection = failed.length > 0
        ? ['', 'Failed packages:', ...failed.map((pkg) => ` ${pkg.name}: ${pkg.error}`)]
        : [];
    return [...header, ...updatedSection, ...failedSection].join('\n');
}
|
|
3383
|
+
|
|
3384
|
+
/**
|
|
3385
|
+
* Compares two semantic versions.
|
|
3386
|
+
*
|
|
3387
|
+
* @param a - First version
|
|
3388
|
+
* @param b - Second version
|
|
3389
|
+
* @returns -1 if a < b, 0 if a == b, 1 if a > b
|
|
3390
|
+
*
|
|
3391
|
+
* @example
|
|
3392
|
+
* compare(parseVersion('1.0.0'), parseVersion('2.0.0')) // -1
|
|
3393
|
+
* compare(parseVersion('1.0.0'), parseVersion('1.0.0')) // 0
|
|
3394
|
+
* compare(parseVersion('2.0.0'), parseVersion('1.0.0')) // 1
|
|
3395
|
+
*/
|
|
3396
|
+
function compare(a, b) {
|
|
3397
|
+
// Compare major, minor, patch
|
|
3398
|
+
if (a.major !== b.major) {
|
|
3399
|
+
return a.major < b.major ? -1 : 1;
|
|
3400
|
+
}
|
|
3401
|
+
if (a.minor !== b.minor) {
|
|
3402
|
+
return a.minor < b.minor ? -1 : 1;
|
|
3403
|
+
}
|
|
3404
|
+
if (a.patch !== b.patch) {
|
|
3405
|
+
return a.patch < b.patch ? -1 : 1;
|
|
3406
|
+
}
|
|
3407
|
+
// Compare prerelease
|
|
3408
|
+
// Version with prerelease has lower precedence than release
|
|
3409
|
+
if (a.prerelease.length === 0 && b.prerelease.length > 0) {
|
|
3410
|
+
return 1; // a is release, b is prerelease -> a > b
|
|
3411
|
+
}
|
|
3412
|
+
if (a.prerelease.length > 0 && b.prerelease.length === 0) {
|
|
3413
|
+
return -1; // a is prerelease, b is release -> a < b
|
|
3414
|
+
}
|
|
3415
|
+
// Both have prerelease - compare identifiers
|
|
3416
|
+
const maxLen = max(a.prerelease.length, b.prerelease.length);
|
|
3417
|
+
for (let i = 0; i < maxLen; i++) {
|
|
3418
|
+
const aId = a.prerelease[i];
|
|
3419
|
+
const bId = b.prerelease[i];
|
|
3420
|
+
// Shorter prerelease array has lower precedence
|
|
3421
|
+
if (aId === undefined && bId !== undefined) {
|
|
3422
|
+
return -1;
|
|
3423
|
+
}
|
|
3424
|
+
if (aId !== undefined && bId === undefined) {
|
|
3425
|
+
return 1;
|
|
3426
|
+
}
|
|
3427
|
+
if (aId === undefined || bId === undefined) {
|
|
3428
|
+
continue;
|
|
3429
|
+
}
|
|
3430
|
+
// Compare identifiers
|
|
3431
|
+
const cmp = compareIdentifiers(aId, bId);
|
|
3432
|
+
if (cmp !== 0) {
|
|
3433
|
+
return cmp;
|
|
3434
|
+
}
|
|
3435
|
+
}
|
|
3436
|
+
return 0;
|
|
3437
|
+
}
|
|
3438
|
+
/**
 * Checks if a version satisfies a single comparator.
 *
 * @param version - Version to check
 * @param comparator - Comparator to test against
 * @returns True if version satisfies the comparator
 */
function satisfiesComparator(version, comparator) {
    const order = compare(version, comparator.version);
    const op = comparator.operator;
    if (op === '=') {
        return order === 0;
    }
    if (op === '>') {
        return order === 1;
    }
    if (op === '>=' || op === '^' || op === '~') {
        // Caret/tilde are normally expanded during parsing; if one leaks
        // through here, treat it as >=.
        return order >= 0;
    }
    if (op === '<') {
        return order === -1;
    }
    if (op === '<=') {
        return order <= 0;
    }
    // Unknown operators never match.
    return false;
}
|
|
3467
|
+
/**
 * Checks if a version satisfies a range.
 *
 * Sets within the range are OR-ed together; comparators inside each set
 * are AND-ed.
 *
 * @param version - Version to check
 * @param range - Range to test against
 * @returns True if version satisfies the range
 *
 * @example
 * satisfies(parseVersion('1.2.3'), parseRange('^1.0.0')) // true
 * satisfies(parseVersion('2.0.0'), parseRange('^1.0.0')) // false
 */
function satisfies(version, range) {
    // An empty range (e.g. "*") matches every version.
    if (range.sets.length === 0) {
        return true;
    }
    for (const set of range.sets) {
        // An empty comparator set also matches everything.
        if (set.comparators.length === 0) {
            return true;
        }
        // All comparators in this set must hold (AND).
        if (set.comparators.every((comp) => satisfiesComparator(version, comp))) {
            return true;
        }
    }
    return false;
}
|
|
3503
|
+
// ============================================================================
|
|
3504
|
+
// Internal helpers
|
|
3505
|
+
// ============================================================================
|
|
3506
|
+
/**
 * Compares two prerelease identifiers.
 *
 * Numeric identifiers have lower precedence than alphanumeric ones;
 * numeric identifiers compare numerically, alphanumeric ones lexically.
 *
 * @param a - First prerelease identifier
 * @param b - Second prerelease identifier
 * @returns -1 if a < b, 0 if equal, 1 if a > b
 */
function compareIdentifiers(a, b) {
    const aNum = isNumeric(a);
    const bNum = isNumeric(b);
    // Mixed kinds: the numeric identifier sorts first.
    if (aNum !== bNum) {
        return aNum ? -1 : 1;
    }
    if (aNum) {
        // Both numeric: compare by numeric value.
        const left = parseInt(a, 10);
        const right = parseInt(b, 10);
        if (left === right) {
            return 0;
        }
        return left < right ? -1 : 1;
    }
    // Both alphanumeric: plain lexical (code-unit) ordering.
    if (a === b) {
        return 0;
    }
    return a < b ? -1 : 1;
}
|
|
3543
|
+
/**
 * Checks whether a string consists solely of ASCII digits (0-9).
 *
 * @param str - String to check
 * @returns True if non-empty and every character is a digit
 */
function isNumeric(str) {
    if (str.length === 0) {
        return false;
    }
    for (const ch of str) {
        // Single-character comparison against '0'..'9' is equivalent to a
        // char-code range check for ASCII digits.
        if (ch < '0' || ch > '9') {
            return false;
        }
    }
    return true;
}
|
|
3560
|
+
|
|
3561
|
+
/**
 * Creates a new Comparator.
 *
 * @param operator - The comparison operator
 * @param version - The version to compare against
 * @returns A new Comparator object pairing the two
 */
function createComparator(operator, version) {
    const comparator = { operator, version };
    return comparator;
}
|
|
3571
|
+
/**
 * Creates a new ComparatorSet (comparators combined with AND logic).
 *
 * @param comparators - Array of comparators
 * @returns A new ComparatorSet wrapping the array
 */
function createComparatorSet(comparators) {
    const set = { comparators };
    return set;
}
|
|
3580
|
+
/**
 * Creates a new Range (comparator sets combined with OR logic).
 *
 * @param sets - Array of comparator sets
 * @param raw - Original raw range string
 * @returns A new Range object
 */
function createRange(sets, raw) {
    const range = { sets, raw };
    return range;
}
|
|
3590
|
+
|
|
3591
|
+
/**
 * Maximum accepted length for a semver range string.
 * `parseRange` rejects longer inputs up front, bounding parsing work
 * on untrusted input.
 */
const MAX_RANGE_LENGTH = 1024;
|
|
3595
|
+
/**
 * Parses a semver range string.
 *
 * Supports:
 * - Exact: 1.2.3, =1.2.3
 * - Comparators: >1.0.0, >=1.0.0, <2.0.0, <=2.0.0
 * - Caret: ^1.2.3 (compatible with version)
 * - Tilde: ~1.2.3 (approximately equivalent)
 * - X-ranges: 1.x, 1.2.x, *
 * - Hyphen ranges: 1.0.0 - 2.0.0
 * - OR: 1.0.0 || 2.0.0
 * - AND: >=1.0.0 <2.0.0
 *
 * @param input - The range string to parse
 * @returns A ParseRangeResult with the parsed range or error
 */
function parseRange(input) {
    if (!input || typeof input !== 'string') {
        return { success: false, error: 'Range string is required' };
    }
    // Bound work on untrusted input.
    if (input.length > MAX_RANGE_LENGTH) {
        return { success: false, error: `Range string exceeds maximum length of ${MAX_RANGE_LENGTH}` };
    }
    const trimmed = input.trim();
    // Bare wildcards (or a blank string) match every version.
    if (trimmed === '' || trimmed === '*' || trimmed.toLowerCase() === 'x') {
        return { success: true, range: createRange([], input) };
    }
    // "||" separates alternatives (OR); each alternative is a comparator set.
    const sets = [];
    for (const alternative of splitByOr(trimmed)) {
        const parsed = parseComparatorSet(alternative.trim());
        if (!parsed.success) {
            return { success: false, error: parsed.error };
        }
        // A wildcard alternative yields no set; skip it.
        if (parsed.set) {
            sets.push(parsed.set);
        }
    }
    return { success: true, range: createRange(sets, input) };
}
|
|
3638
|
+
/**
 * Splits a range string on the "||" delimiter.
 *
 * Performs a left-to-right scan for non-overlapping "||" sequences;
 * surrounding whitespace is preserved in each part. (The previous docstring
 * claimed nesting was respected, but no nesting handling ever existed.)
 *
 * @param input - Range string containing OR-separated groups
 * @returns Array of OR-separated parts (always at least one element)
 */
function splitByOr(input) {
    // String.prototype.split with a string separator performs exactly the
    // non-overlapping left-to-right scan the previous hand-rolled loop did,
    // including the empty-part behavior for leading/trailing "||".
    return input.split('||');
}
|
|
3662
|
+
/**
 * Parses a single comparator set (space-separated comparators = AND logic).
 *
 * @param input - Comparator set string
 * @returns Parsed set result; `set` is omitted when the input matches any version
 */
function parseComparatorSet(input) {
    // An absent or blank set matches any version.
    if (!input || input.trim() === '') {
        return { success: true };
    }
    const trimmed = input.trim();
    // Hyphen ranges ("1.0.0 - 2.0.0") are handled as a single unit.
    const hyphen = parseHyphenRange(trimmed);
    if (hyphen.isHyphenRange) {
        return hyphen.success
            ? { success: true, set: hyphen.set }
            : { success: false, error: hyphen.error };
    }
    // Whitespace-separated comparator tokens are AND-ed together.
    const comparators = [];
    for (const token of splitByWhitespace(trimmed)) {
        const parsed = parseSingleComparator(token);
        if (!parsed.success) {
            return { success: false, error: parsed.error };
        }
        if (parsed.comparators) {
            comparators.push(...parsed.comparators);
        }
    }
    // All tokens were wildcards: matches any version.
    if (comparators.length === 0) {
        return { success: true };
    }
    return { success: true, set: createComparatorSet(comparators) };
}
|
|
3698
|
+
/**
 * Checks for and parses hyphen ranges like "1.0.0 - 2.0.0".
 *
 * @param input - Potential hyphen range string
 * @returns Result with `isHyphenRange` false when no " - " delimiter exists,
 *          otherwise the parsed set or an error
 */
function parseHyphenRange(input) {
    // The delimiter is exactly space-hyphen-space; first occurrence wins.
    const hyphenPos = input.indexOf(' - ');
    if (hyphenPos === -1) {
        return { isHyphenRange: false, success: true };
    }
    const leftPart = input.slice(0, hyphenPos).trim();
    const rightPart = input.slice(hyphenPos + 3).trim();
    const leftVersion = parseSimpleVersion(leftPart);
    if (!leftVersion) {
        return { isHyphenRange: true, success: false, error: `Invalid left side of hyphen range: "${leftPart}"` };
    }
    const rightVersion = parseSimpleVersion(rightPart);
    if (!rightVersion) {
        return { isHyphenRange: true, success: false, error: `Invalid right side of hyphen range: "${rightPart}"` };
    }
    // "a - b" expands to ">=a <=b".
    return {
        isHyphenRange: true,
        success: true,
        set: createComparatorSet([
            createComparator('>=', leftVersion),
            createComparator('<=', rightVersion),
        ]),
    };
}
|
|
3734
|
+
/**
 * Splits a string on runs of spaces and tabs.
 *
 * @param input - String to split
 * @returns Array of non-empty tokens
 */
function splitByWhitespace(input) {
    // Split on runs of spaces/tabs; drop the empty fragments produced by
    // leading/trailing whitespace (same result as the former manual scan).
    return input.split(/[ \t]+/).filter((token) => token !== '');
}
|
|
3759
|
+
/**
 * Parses a single comparator token (e.g., ">=1.0.0", "^1.2.3", "~1.0").
 *
 * @param token - Comparator token to parse
 * @returns Parsed comparator result; `comparators` is empty for wildcards
 */
function parseSingleComparator(token) {
    // Detect a leading operator; two-character forms are checked first.
    let operator = '=';
    let rest = token;
    if (token.startsWith('>=') || token.startsWith('<=')) {
        operator = token.slice(0, 2);
        rest = token.slice(2);
    }
    else if (token.startsWith('^') || token.startsWith('~') ||
        token.startsWith('>') || token.startsWith('<')) {
        operator = token[0];
        rest = token.slice(1);
    }
    else if (token.startsWith('=')) {
        rest = token.slice(1);
    }
    const versionPart = rest;
    // Bare wildcard matches any version.
    if (versionPart === '*' || versionPart.toLowerCase() === 'x') {
        return { success: true, comparators: [] };
    }
    // Partial wildcards (1.x, 1.2.*) become bounded ranges.
    if (versionPart.includes('x') || versionPart.includes('X') || versionPart.includes('*')) {
        return parseXRange(versionPart);
    }
    const version = parseSimpleVersion(versionPart);
    if (!version) {
        return { success: false, error: `Invalid version in comparator: "${versionPart}"` };
    }
    // Caret/tilde expand into >=/< comparator pairs.
    if (operator === '^') {
        return expandCaretRange(version);
    }
    if (operator === '~') {
        return expandTildeRange(version);
    }
    return { success: true, comparators: [createComparator(operator, version)] };
}
|
|
3825
|
+
/**
 * Parses x-ranges like 1.x, 1.2.x, etc.
 *
 * @param input - X-range string to parse
 * @param _operator - Range operator (unused but kept for interface consistency)
 * @returns Comparator result bounding the wildcard range
 */
function parseXRange(input, _operator) {
    // Collect numeric components up to the first wildcard/empty segment.
    const nums = [];
    for (const segment of input.split('.')) {
        const norm = segment.toLowerCase();
        if (norm === 'x' || norm === '*' || norm === '') {
            break;
        }
        const value = parseInt(segment, 10);
        if (globalIsNaN(value) || value < 0) {
            return { success: false, error: `Invalid x-range: "${input}"` };
        }
        nums.push(value);
    }
    // "*" or "x" alone: matches any version.
    if (nums.length === 0) {
        return { success: true, comparators: [] };
    }
    // "1.x" -> >=1.0.0 <2.0.0
    if (nums.length === 1) {
        const lower = createSemVer({ major: nums[0], minor: 0, patch: 0 });
        const upper = createSemVer({ major: nums[0] + 1, minor: 0, patch: 0 });
        return { success: true, comparators: [createComparator('>=', lower), createComparator('<', upper)] };
    }
    // "1.2.x" -> >=1.2.0 <1.3.0
    const lower = createSemVer({ major: nums[0], minor: nums[1], patch: 0 });
    const upper = createSemVer({ major: nums[0], minor: nums[1] + 1, patch: 0 });
    return { success: true, comparators: [createComparator('>=', lower), createComparator('<', upper)] };
}
|
|
3861
|
+
/**
 * Expands a caret range: ^1.2.3 -> >=1.2.3 <2.0.0
 *
 * The upper bound bumps the leftmost non-zero component (so ^0.2.3 allows
 * only 0.2.x, and ^0.0.3 allows only 0.0.3).
 *
 * @param version - Base version for the caret range
 * @returns Expanded comparator result
 */
function expandCaretRange(version) {
    const { major, minor, patch } = version;
    let upper;
    if (major !== 0) {
        // ^x.y.z -> >=x.y.z <(x+1).0.0
        upper = createSemVer({ major: major + 1, minor: 0, patch: 0 });
    }
    else if (minor !== 0) {
        // ^0.x.y -> >=0.x.y <0.(x+1).0
        upper = createSemVer({ major: 0, minor: minor + 1, patch: 0 });
    }
    else {
        // ^0.0.x -> >=0.0.x <0.0.(x+1)
        upper = createSemVer({ major: 0, minor: 0, patch: patch + 1 });
    }
    return { success: true, comparators: [createComparator('>=', version), createComparator('<', upper)] };
}
|
|
3889
|
+
/**
 * Expands a tilde range: ~1.2.3 -> >=1.2.3 <1.3.0
 *
 * NOTE(review): npm's node-semver widens bare "~1" to <2.0.0; this
 * implementation always bumps the minor component — presumably an accepted
 * simplification, since component count is not tracked here. Confirm if
 * strict npm parity is required.
 *
 * @param version - Base version for the tilde range
 * @returns Expanded comparator result
 */
function expandTildeRange(version) {
    // Allow patch-level changes only: bump minor for the exclusive upper bound.
    const upperBound = createSemVer({
        major: version.major,
        minor: version.minor + 1,
        patch: 0,
    });
    return { success: true, comparators: [createComparator('>=', version), createComparator('<', upperBound)] };
}
|
|
3903
|
+
/**
 * Parses a simple version string (no range operators).
 * More lenient than strict semver - accepts partial versions like "1.2",
 * an optional leading "v", and ignores prerelease/build suffixes.
 *
 * @param input - Version string to parse
 * @returns Parsed SemVer or null if invalid
 */
function parseSimpleVersion(input) {
    if (!input) {
        return null;
    }
    // Tolerate a leading "v"/"V" prefix.
    const body = input[0] === 'v' || input[0] === 'V' ? input.slice(1) : input;
    const segments = body.split('.');
    if (segments.length === 0) {
        return null;
    }
    const nums = [];
    for (const segment of segments) {
        // Truncate at the first prerelease ("-") or build ("+") marker.
        let numPart = segment;
        const dashIdx = segment.indexOf('-');
        const plusIdx = segment.indexOf('+');
        if (dashIdx !== -1) {
            numPart = segment.slice(0, dashIdx);
        }
        else if (plusIdx !== -1) {
            numPart = segment.slice(0, plusIdx);
        }
        // A wildcard or empty segment ends the numeric portion.
        if (numPart === '' || numPart.toLowerCase() === 'x' || numPart === '*') {
            break;
        }
        const value = parseInt(numPart, 10);
        if (globalIsNaN(value) || value < 0) {
            return null;
        }
        nums.push(value);
    }
    if (nums.length === 0) {
        return null;
    }
    // Missing minor/patch components default to 0.
    return createSemVer({
        major: nums[0],
        minor: nums[1] ?? 0,
        patch: nums[2] ?? 0,
        prerelease: [],
        build: [],
        raw: input,
    });
}
|
|
3952
|
+
|
|
3953
|
+
/**
|
|
3954
|
+
* Workspace Validation
|
|
3955
|
+
*
|
|
3956
|
+
* Validation utilities for workspace integrity checks.
|
|
3957
|
+
* Verifies package configurations, dependencies, and workspace structure.
|
|
3958
|
+
*/
|
|
3959
|
+
/**
 * Validates a workspace for common issues.
 *
 * Runs two workspace-level checks (non-empty, acyclic dependency graph)
 * plus three per-package checks (version, name, internal dependency ranges)
 * and aggregates the outcomes into a single report.
 *
 * @param workspace - The workspace to validate
 * @returns Validation report
 *
 * @example
 * ```typescript
 * import { validateWorkspace } from '@hyperfrontend/versioning'
 *
 * const report = validateWorkspace(workspace)
 *
 * if (!report.valid) {
 *   console.error(`${report.errorCount} error(s) found`)
 *   for (const result of report.results) {
 *     if (!result.result.valid) {
 *       console.error(`  ${result.checkName}: ${result.result.error}`)
 *     }
 *   }
 * }
 * ```
 */
function validateWorkspace(workspace) {
    const results = [];
    // Workspace-level checks (no associated package).
    results.push({
        checkId: 'workspace-has-projects',
        checkName: 'Workspace has projects',
        packageName: null,
        result: validateHasProjects(workspace),
    }, {
        checkId: 'no-circular-dependencies',
        checkName: 'No circular dependencies',
        packageName: null,
        result: validateNoCircularDependencies(workspace),
    });
    // Per-package checks.
    for (const project of workspace.projectList) {
        const packageChecks = [
            ['valid-version', 'Valid semver version', validateProjectVersion(project)],
            ['valid-name', 'Valid package name', validateProjectName(project)],
            ['dependency-versions', 'Internal dependency versions', validateDependencyVersions(workspace, project)],
        ];
        for (const [checkId, checkName, result] of packageChecks) {
            results.push({ checkId, checkName, packageName: project.name, result });
        }
    }
    // Aggregate errors/warnings and collect the distinct failing packages.
    const errors = results.filter((entry) => !entry.result.valid);
    const warnings = results.filter((entry) => entry.result.warning !== undefined);
    const invalidPackages = createSet(errors
        .map((entry) => entry.packageName)
        .filter((name) => name !== null));
    return {
        results,
        valid: errors.length === 0,
        errorCount: errors.length,
        warningCount: warnings.length,
        invalidPackages: [...invalidPackages],
    };
}
|
|
4033
|
+
/**
 * Validates that the workspace has at least one project.
 *
 * @param workspace - Workspace to inspect
 * @returns Validation result indicating success or failure
 */
function validateHasProjects(workspace) {
    const hasAny = workspace.projects.size > 0;
    return hasAny
        ? { valid: true }
        : { valid: false, error: 'Workspace has no projects' };
}
|
|
4048
|
+
/**
 * Validates that there are no circular dependencies.
 *
 * Runs a depth-first search over `workspace.reverseDependencyGraph`
 * (cycles in the reversed graph coincide with cycles in the forward graph).
 *
 * @param workspace - Workspace to check for circular dependencies
 * @returns Validation result indicating success or failure
 */
function validateNoCircularDependencies(workspace) {
    const visited = createSet();
    const inProgress = createSet();
    /**
     * Depth-first search; a back edge to a node on the current DFS path
     * indicates a cycle.
     *
     * @param node - Package name currently being visited
     * @returns True if a cycle is reachable from this node
     */
    const hasCycle = (node) => {
        visited.add(node);
        inProgress.add(node);
        const neighbors = workspace.reverseDependencyGraph.get(node) ?? [];
        for (const neighbor of neighbors) {
            if (inProgress.has(neighbor)) {
                return true;
            }
            if (!visited.has(neighbor) && hasCycle(neighbor)) {
                return true;
            }
        }
        inProgress.delete(node);
        return false;
    };
    for (const name of workspace.projects.keys()) {
        if (!visited.has(name) && hasCycle(name)) {
            return {
                valid: false,
                error: 'Circular dependency detected',
            };
        }
    }
    return { valid: true };
}
|
|
4093
|
+
/**
 * Validates a project's version is valid semver.
 *
 * @param project - Project to validate version for
 * @returns Validation result indicating success or failure
 */
function validateProjectVersion(project) {
    const parsed = parseVersion(project.version);
    if (parsed.success) {
        return { valid: true };
    }
    return {
        valid: false,
        error: `Invalid semver version: ${project.version}`,
    };
}
|
|
4109
|
+
/**
 * Validates a project's package name.
 *
 * @param project - Project to validate name for
 * @returns Validation result indicating success or failure
 */
function validateProjectName(project) {
    const name = project.name;
    // A non-blank name is mandatory.
    if (!name || name.trim() === '') {
        return { valid: false, error: 'Package name is required' };
    }
    // Character-level format check (regex-free to avoid ReDoS).
    if (!validatePackageNameFormat(name).valid) {
        return { valid: false, error: `Invalid package name format: ${name}` };
    }
    // npm enforces a 214-character ceiling on package names.
    if (name.length > 214) {
        return { valid: false, error: 'Package name exceeds 214 characters' };
    }
    return { valid: true };
}
|
|
4139
|
+
/**
 * Validates that internal dependency versions are satisfiable.
 *
 * Unsatisfied ranges are surfaced as warnings (the result stays valid).
 *
 * @param workspace - Workspace containing all projects
 * @param project - Project to validate dependencies for
 * @returns Validation result with warnings for unsatisfied versions
 */
function validateDependencyVersions(workspace, project) {
    const DEP_FIELDS = ['dependencies', 'devDependencies', 'peerDependencies', 'optionalDependencies'];
    const warnings = [];
    for (const depName of project.internalDependencies) {
        const dep = workspace.projects.get(depName);
        // Missing projects are reported by a separate existence check.
        if (!dep) {
            continue;
        }
        // Locate the declared range in the first field that mentions the dep.
        let declaredRange;
        for (const field of DEP_FIELDS) {
            const deps = project.packageJson[field];
            if (deps?.[depName]) {
                declaredRange = deps[depName];
                break;
            }
        }
        // Workspace-protocol ranges are always considered satisfied.
        if (!declaredRange || isWorkspaceVersion(declaredRange)) {
            continue;
        }
        const depVersion = parseVersion(dep.version);
        if (!depVersion.success || !depVersion.version) {
            continue;
        }
        const parsedRange = parseRange(declaredRange);
        if (!parsedRange.success || !parsedRange.range) {
            continue;
        }
        if (!satisfies(depVersion.version, parsedRange.range)) {
            warnings.push(`${depName}@${dep.version} does not satisfy ${declaredRange}`);
        }
    }
    if (warnings.length === 0) {
        return { valid: true };
    }
    return {
        valid: true, // Warning, not error
        warning: warnings.join('; '),
    };
}
|
|
4185
|
+
/**
 * Validates npm package name format without using complex regex.
 * This avoids ReDoS vulnerabilities from backtracking regex patterns.
 *
 * Accepts lowercase letters, digits, "-", "_" and "."; the first character
 * of the scope and of the name must be a lowercase letter or digit. Scoped
 * names must look like "@scope/name".
 *
 * @param name - Package name to validate
 * @returns Validation result ({ valid: boolean })
 */
function validatePackageNameFormat(name) {
    // Bug fix: an empty or nullish name previously crashed with a TypeError
    // (name[0] is undefined -> undefined.charCodeAt). Reject it explicitly.
    if (!name) {
        return { valid: false };
    }
    // Valid characters: lowercase letters, digits, hyphens, underscores, dots.
    const isValidChar = (char) => {
        const code = char.charCodeAt(0);
        return ((code >= 97 && code <= 122) || // a-z
            (code >= 48 && code <= 57) || // 0-9
            code === 45 || // -
            code === 95 || // _
            code === 46 // .
        );
    };
    // First char must be a lowercase letter or digit.
    const isValidFirstChar = (char) => {
        const code = char.charCodeAt(0);
        return (code >= 97 && code <= 122) || (code >= 48 && code <= 57);
    };
    // Validates one identifier segment (a scope or a bare/package name).
    const isValidSegment = (segment) => {
        if (segment.length === 0 || !isValidFirstChar(segment[0])) {
            return false;
        }
        for (const char of segment) {
            if (!isValidChar(char)) {
                return false;
            }
        }
        return true;
    };
    if (name.startsWith('@')) {
        // Scoped package: "@scope/name" — the "/" must exist and must not
        // sit directly after "@" or at the very end.
        const slashIndex = name.indexOf('/');
        if (slashIndex === -1 || slashIndex === 1 || slashIndex === name.length - 1) {
            return { valid: false };
        }
        const scope = name.slice(1, slashIndex);
        const packageName = name.slice(slashIndex + 1);
        if (!isValidSegment(scope) || !isValidSegment(packageName)) {
            return { valid: false };
        }
    }
    else if (!isValidSegment(name)) {
        // Unscoped package.
        return { valid: false };
    }
    return { valid: true };
}
|
|
4242
|
+
/**
 * Checks if a version range is a workspace protocol version.
 *
 * @param versionRange - Version range string to check
 * @returns True if the range uses workspace protocol
 */
function isWorkspaceVersion(versionRange) {
    // Exact wildcard and bare "link:" specifiers are treated as workspace-local.
    if (versionRange === '*' || versionRange === 'link:') {
        return true;
    }
    return versionRange.startsWith('workspace:');
}
|
|
4251
|
+
/**
 * Validates a single project.
 *
 * Runs the version check first, then the name check, and returns the
 * first failing result unchanged; succeeds only if both pass.
 *
 * @param project - The project to validate
 * @returns Validation result
 */
function validateProject(project) {
    // Short-circuit on the first failing validator, preserving its result object.
    for (const check of [validateProjectVersion, validateProjectName]) {
        const result = check(project);
        if (!result.valid) {
            return result;
        }
    }
    return { valid: true };
}
|
|
4268
|
+
/**
 * Creates a summary of the validation report.
 *
 * @param report - Report object from workspace validation
 * @returns Human-readable summary
 */
function summarizeValidation(report) {
    // Optional "[package] " prefix for per-check lines.
    const prefix = (entry) => (entry.packageName ? `[${entry.packageName}] ` : '');
    if (report.valid) {
        const out = ['Workspace validation passed'];
        if (report.warningCount > 0) {
            out.push(` ${report.warningCount} warning(s)`);
        }
        return out.join('\n');
    }
    // Failure path: header, counts, then itemized errors (and warnings, if any).
    const out = [
        'Workspace validation failed',
        ` ${report.errorCount} error(s)`,
        ` ${report.warningCount} warning(s)`,
        '',
        'Errors:',
    ];
    for (const entry of report.results) {
        if (!entry.result.valid) {
            out.push(` ${prefix(entry)}${entry.checkName}: ${entry.result.error}`);
        }
    }
    if (report.warningCount > 0) {
        out.push('');
        out.push('Warnings:');
        for (const entry of report.results) {
            if (entry.result.warning) {
                out.push(` ${prefix(entry)}${entry.checkName}: ${entry.result.warning}`);
            }
        }
    }
    return out.join('\n');
}
|
|
4307
|
+
|
|
4308
|
+
/**
|
|
4309
|
+
* Workspace Module
|
|
4310
|
+
*
|
|
4311
|
+
* Package discovery, dependency management, and versioning coordination
|
|
4312
|
+
* for monorepo workspaces. Integrates with project-scope for file operations.
|
|
4313
|
+
*
|
|
4314
|
+
* @example
|
|
4315
|
+
* ```typescript
|
|
4316
|
+
* import { discoverPackages, calculateCascadeBumps, applyBumps } from '@hyperfrontend/versioning'
|
|
4317
|
+
*
|
|
4318
|
+
* // Discover workspace packages
|
|
4319
|
+
* const { projects, projectMap, workspaceRoot } = discoverPackages()
|
|
4320
|
+
*
|
|
4321
|
+
* // Calculate cascade bumps for a package update
|
|
4322
|
+
* const cascadeResult = calculateCascadeBumps(workspace, [
|
|
4323
|
+
* { name: 'lib-utils', bumpType: 'minor' }
|
|
4324
|
+
* ])
|
|
4325
|
+
*
|
|
4326
|
+
* // Apply the bumps
|
|
4327
|
+
* const updateResult = applyBumps(workspace, cascadeResult.bumps)
|
|
4328
|
+
* ```
|
|
4329
|
+
*/
|
|
4330
|
+
/**
 * Detects the workspace type based on configuration markers.
 *
 * @param workspaceRoot - Absolute path to workspace root
 * @returns Detected workspace type (nx, turbo, lerna, pnpm, yarn, npm, rush, or unknown)
 */
function detectWorkspaceType(workspaceRoot) {
    // Marker files checked in priority order; the first one found wins.
    const markers = [
        ['nx.json', 'nx'],
        ['turbo.json', 'turbo'],
        ['lerna.json', 'lerna'],
        ['pnpm-workspace.yaml', 'pnpm'],
        ['rush.json', 'rush'],
    ];
    for (const [file, type] of markers) {
        if (exists(join$1(workspaceRoot, file))) {
            return type;
        }
    }
    // No marker file: a "workspaces" field in the root package.json implies yarn workspaces.
    const rootPkg = readPackageJsonIfExists(join$1(workspaceRoot, 'package.json'));
    return rootPkg?.workspaces ? 'yarn' : 'npm';
}
|
|
4353
|
+
/**
 * Creates a complete workspace object by discovering packages
 * and building the dependency graph.
 *
 * @param options - Discovery configuration options
 * @returns Complete workspace object with projects and dependency graph
 *
 * @example
 * ```typescript
 * import { createWorkspaceFromDisk } from '@hyperfrontend/versioning'
 *
 * const workspace = createWorkspaceFromDisk()
 *
 * // Access projects
 * for (const project of workspace.projectList) {
 *   console.log(`${project.name}@${project.version}`)
 * }
 *
 * // Get dependents of a package
 * const dependents = workspace.dependencyGraph.get('lib-utils')
 * ```
 */
function createWorkspaceFromDisk(options = {}) {
    const { projects, workspaceRoot, config } = discoverPackages(options);
    const { dependencyGraph, reverseDependencyGraph } = buildDependencyGraph(projects);
    // Index discovered projects by name for O(1) lookups.
    const projectMap = createMap();
    for (const project of projects) {
        projectMap.set(project.name, project);
    }
    return createWorkspace({
        root: workspaceRoot,
        type: detectWorkspaceType(workspaceRoot),
        projects: projectMap,
        config,
        dependencyGraph,
        reverseDependencyGraph,
    });
}
|
|
4392
|
+
|
|
4393
|
+
export { CHANGELOG_NAMES, DEFAULT_BATCH_UPDATE_OPTIONS, DEFAULT_CASCADE_OPTIONS, DEFAULT_EXCLUDE, DEFAULT_PATTERNS, DEFAULT_WORKSPACE_CONFIG, addDependent, applyBumps, buildDependencyGraph, calculateCascadeBumps, calculateCascadeBumpsFromPackage, createProject, createWorkspace, createWorkspaceConfig, createWorkspaceFromDisk, dependsOn, discoverAllChangelogs, discoverPackages, discoverProject, discoverProjectByName, findChangelogs, findInternalDependencies, findInternalDependenciesWithTypes, findProjectChangelog, getDependencies, getDependencyCount, getDependentCount, getDependents, getExpectedChangelogPath, getProject, getProjectCount, getProjectNames, getTopologicalOrder, getTransitiveDependencies, getTransitiveDependents, hasChangelog, hasInternalDependencies, hasInternalDependents, hasProject, hasChangelog$1 as hasProjectChangelog, isPrivate, isPublishable, summarizeBatchUpdate, summarizeCascadeBumps, summarizeValidation, transitivelyDependsOn, updatePackageVersionInTree, validateProject, validateWorkspace, withDependents };
|
|
4394
|
+
//# sourceMappingURL=index.esm.js.map
|