@hyperfrontend/versioning 0.1.0 → 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/ARCHITECTURE.md +50 -1
- package/CHANGELOG.md +23 -23
- package/README.md +12 -9
- package/changelog/index.cjs.js +23 -2
- package/changelog/index.cjs.js.map +1 -1
- package/changelog/index.esm.js +23 -2
- package/changelog/index.esm.js.map +1 -1
- package/changelog/models/entry.d.ts +5 -0
- package/changelog/models/entry.d.ts.map +1 -1
- package/changelog/models/index.cjs.js +2 -0
- package/changelog/models/index.cjs.js.map +1 -1
- package/changelog/models/index.esm.js +2 -0
- package/changelog/models/index.esm.js.map +1 -1
- package/changelog/operations/index.cjs.js.map +1 -1
- package/changelog/operations/index.esm.js.map +1 -1
- package/changelog/parse/index.cjs.js +23 -2
- package/changelog/parse/index.cjs.js.map +1 -1
- package/changelog/parse/index.esm.js +23 -2
- package/changelog/parse/index.esm.js.map +1 -1
- package/changelog/parse/line.d.ts.map +1 -1
- package/commits/classify/classifier.d.ts +73 -0
- package/commits/classify/classifier.d.ts.map +1 -0
- package/commits/classify/index.cjs.js +705 -0
- package/commits/classify/index.cjs.js.map +1 -0
- package/commits/classify/index.d.ts +8 -0
- package/commits/classify/index.d.ts.map +1 -0
- package/commits/classify/index.esm.js +678 -0
- package/commits/classify/index.esm.js.map +1 -0
- package/commits/classify/infrastructure.d.ts +205 -0
- package/commits/classify/infrastructure.d.ts.map +1 -0
- package/commits/classify/models.d.ts +108 -0
- package/commits/classify/models.d.ts.map +1 -0
- package/commits/classify/project-scopes.d.ts +59 -0
- package/commits/classify/project-scopes.d.ts.map +1 -0
- package/commits/index.cjs.js +702 -0
- package/commits/index.cjs.js.map +1 -1
- package/commits/index.d.ts +1 -0
- package/commits/index.d.ts.map +1 -1
- package/commits/index.esm.js +677 -1
- package/commits/index.esm.js.map +1 -1
- package/flow/executor/execute.d.ts +6 -0
- package/flow/executor/execute.d.ts.map +1 -1
- package/flow/executor/index.cjs.js +1604 -42
- package/flow/executor/index.cjs.js.map +1 -1
- package/flow/executor/index.esm.js +1610 -48
- package/flow/executor/index.esm.js.map +1 -1
- package/flow/index.cjs.js +6651 -2893
- package/flow/index.cjs.js.map +1 -1
- package/flow/index.esm.js +6655 -2899
- package/flow/index.esm.js.map +1 -1
- package/flow/models/index.cjs.js +125 -0
- package/flow/models/index.cjs.js.map +1 -1
- package/flow/models/index.esm.js +125 -0
- package/flow/models/index.esm.js.map +1 -1
- package/flow/models/types.d.ts +148 -3
- package/flow/models/types.d.ts.map +1 -1
- package/flow/presets/conventional.d.ts +9 -8
- package/flow/presets/conventional.d.ts.map +1 -1
- package/flow/presets/independent.d.ts.map +1 -1
- package/flow/presets/index.cjs.js +3588 -298
- package/flow/presets/index.cjs.js.map +1 -1
- package/flow/presets/index.esm.js +3588 -298
- package/flow/presets/index.esm.js.map +1 -1
- package/flow/presets/synced.d.ts.map +1 -1
- package/flow/steps/analyze-commits.d.ts +9 -6
- package/flow/steps/analyze-commits.d.ts.map +1 -1
- package/flow/steps/calculate-bump.d.ts.map +1 -1
- package/flow/steps/fetch-registry.d.ts.map +1 -1
- package/flow/steps/generate-changelog.d.ts.map +1 -1
- package/flow/steps/index.cjs.js +3604 -318
- package/flow/steps/index.cjs.js.map +1 -1
- package/flow/steps/index.d.ts +1 -0
- package/flow/steps/index.d.ts.map +1 -1
- package/flow/steps/index.esm.js +3603 -319
- package/flow/steps/index.esm.js.map +1 -1
- package/flow/steps/resolve-repository.d.ts +36 -0
- package/flow/steps/resolve-repository.d.ts.map +1 -0
- package/flow/steps/update-packages.d.ts.map +1 -1
- package/git/factory.d.ts +14 -0
- package/git/factory.d.ts.map +1 -1
- package/git/index.cjs.js +65 -0
- package/git/index.cjs.js.map +1 -1
- package/git/index.esm.js +66 -2
- package/git/index.esm.js.map +1 -1
- package/git/operations/index.cjs.js +40 -0
- package/git/operations/index.cjs.js.map +1 -1
- package/git/operations/index.d.ts +1 -1
- package/git/operations/index.d.ts.map +1 -1
- package/git/operations/index.esm.js +41 -2
- package/git/operations/index.esm.js.map +1 -1
- package/git/operations/log.d.ts +23 -0
- package/git/operations/log.d.ts.map +1 -1
- package/index.cjs.js +6962 -4413
- package/index.cjs.js.map +1 -1
- package/index.esm.js +6964 -4415
- package/index.esm.js.map +1 -1
- package/package.json +26 -1
- package/registry/index.cjs.js +3 -3
- package/registry/index.cjs.js.map +1 -1
- package/registry/index.esm.js +3 -3
- package/registry/index.esm.js.map +1 -1
- package/registry/models/index.cjs.js +2 -0
- package/registry/models/index.cjs.js.map +1 -1
- package/registry/models/index.esm.js +2 -0
- package/registry/models/index.esm.js.map +1 -1
- package/registry/models/version-info.d.ts +10 -0
- package/registry/models/version-info.d.ts.map +1 -1
- package/registry/npm/client.d.ts.map +1 -1
- package/registry/npm/index.cjs.js +1 -3
- package/registry/npm/index.cjs.js.map +1 -1
- package/registry/npm/index.esm.js +1 -3
- package/registry/npm/index.esm.js.map +1 -1
- package/repository/index.cjs.js +998 -0
- package/repository/index.cjs.js.map +1 -0
- package/repository/index.d.ts +4 -0
- package/repository/index.d.ts.map +1 -0
- package/repository/index.esm.js +981 -0
- package/repository/index.esm.js.map +1 -0
- package/repository/models/index.cjs.js +301 -0
- package/repository/models/index.cjs.js.map +1 -0
- package/repository/models/index.d.ts +7 -0
- package/repository/models/index.d.ts.map +1 -0
- package/repository/models/index.esm.js +290 -0
- package/repository/models/index.esm.js.map +1 -0
- package/repository/models/platform.d.ts +58 -0
- package/repository/models/platform.d.ts.map +1 -0
- package/repository/models/repository-config.d.ts +132 -0
- package/repository/models/repository-config.d.ts.map +1 -0
- package/repository/models/resolution.d.ts +121 -0
- package/repository/models/resolution.d.ts.map +1 -0
- package/repository/parse/index.cjs.js +755 -0
- package/repository/parse/index.cjs.js.map +1 -0
- package/repository/parse/index.d.ts +5 -0
- package/repository/parse/index.d.ts.map +1 -0
- package/repository/parse/index.esm.js +749 -0
- package/repository/parse/index.esm.js.map +1 -0
- package/repository/parse/package-json.d.ts +100 -0
- package/repository/parse/package-json.d.ts.map +1 -0
- package/repository/parse/url.d.ts +81 -0
- package/repository/parse/url.d.ts.map +1 -0
- package/repository/url/compare.d.ts +84 -0
- package/repository/url/compare.d.ts.map +1 -0
- package/repository/url/index.cjs.js +178 -0
- package/repository/url/index.cjs.js.map +1 -0
- package/repository/url/index.d.ts +3 -0
- package/repository/url/index.d.ts.map +1 -0
- package/repository/url/index.esm.js +176 -0
- package/repository/url/index.esm.js.map +1 -0
- package/workspace/discovery/index.cjs.js +324 -330
- package/workspace/discovery/index.cjs.js.map +1 -1
- package/workspace/discovery/index.esm.js +324 -330
- package/workspace/discovery/index.esm.js.map +1 -1
- package/workspace/discovery/packages.d.ts +0 -6
- package/workspace/discovery/packages.d.ts.map +1 -1
- package/workspace/index.cjs.js +0 -6
- package/workspace/index.cjs.js.map +1 -1
- package/workspace/index.esm.js +0 -6
- package/workspace/index.esm.js.map +1 -1
|
@@ -1,8 +1,8 @@
|
|
|
1
1
|
import 'node:util';
|
|
2
|
-
import { join, normalize, sep, isAbsolute as isAbsolute$1, relative, resolve, dirname } from 'node:path';
|
|
3
|
-
import { existsSync, mkdirSync, statSync, lstatSync, readdirSync,
|
|
2
|
+
import { join as join$1, normalize, sep, isAbsolute as isAbsolute$1, relative, resolve, dirname, parse as parse$1, basename } from 'node:path';
|
|
3
|
+
import { existsSync, readFileSync, mkdirSync, statSync, lstatSync, readdirSync, readlinkSync, unlinkSync, rmSync, writeFileSync, chmodSync } from 'node:fs';
|
|
4
4
|
import 'node:os';
|
|
5
|
-
import { execSync } from 'node:child_process';
|
|
5
|
+
import { execFileSync, execSync } from 'node:child_process';
|
|
6
6
|
|
|
7
7
|
/**
|
|
8
8
|
* Safe copies of Date built-in via factory function and static methods.
|
|
@@ -115,6 +115,10 @@ const keys = _Object.keys;
|
|
|
115
115
|
* (Safe copy) Returns an array of key/values of the enumerable own properties of an object.
|
|
116
116
|
*/
|
|
117
117
|
const entries = _Object.entries;
|
|
118
|
+
/**
|
|
119
|
+
* (Safe copy) Returns an array of values of the enumerable own properties of an object.
|
|
120
|
+
*/
|
|
121
|
+
const values = _Object.values;
|
|
118
122
|
/**
|
|
119
123
|
* (Safe copy) Adds one or more properties to an object, and/or modifies attributes of existing properties.
|
|
120
124
|
*/
|
|
@@ -572,7 +576,7 @@ function createScopedLogger(namespace, options = {}) {
|
|
|
572
576
|
*/
|
|
573
577
|
createScopedLogger('project-scope');
|
|
574
578
|
|
|
575
|
-
createScopedLogger('project-scope:fs');
|
|
579
|
+
const fsLogger = createScopedLogger('project-scope:fs');
|
|
576
580
|
/**
|
|
577
581
|
* Create a file system error with code and context.
|
|
578
582
|
*
|
|
@@ -589,6 +593,71 @@ function createFileSystemError(message, code, context) {
|
|
|
589
593
|
});
|
|
590
594
|
return error;
|
|
591
595
|
}
|
|
596
|
+
/**
|
|
597
|
+
* Read file contents as string.
|
|
598
|
+
*
|
|
599
|
+
* @param filePath - Path to file
|
|
600
|
+
* @param encoding - File encoding (default: utf-8)
|
|
601
|
+
* @returns File contents as string
|
|
602
|
+
* @throws {Error} If file doesn't exist or can't be read
|
|
603
|
+
*
|
|
604
|
+
* @example
|
|
605
|
+
* ```typescript
|
|
606
|
+
* import { readFileContent } from '@hyperfrontend/project-scope'
|
|
607
|
+
*
|
|
608
|
+
* const content = readFileContent('./package.json')
|
|
609
|
+
* console.log(content) // JSON string
|
|
610
|
+
* ```
|
|
611
|
+
*/
|
|
612
|
+
function readFileContent(filePath, encoding = 'utf-8') {
|
|
613
|
+
if (!existsSync(filePath)) {
|
|
614
|
+
fsLogger.debug('File not found', { path: filePath });
|
|
615
|
+
throw createFileSystemError(`File not found: ${filePath}`, 'FS_NOT_FOUND', { path: filePath, operation: 'read' });
|
|
616
|
+
}
|
|
617
|
+
try {
|
|
618
|
+
return readFileSync(filePath, { encoding });
|
|
619
|
+
}
|
|
620
|
+
catch (error) {
|
|
621
|
+
fsLogger.warn('Failed to read file', { path: filePath });
|
|
622
|
+
throw createFileSystemError(`Failed to read file: ${filePath}`, 'FS_READ_ERROR', { path: filePath, operation: 'read', cause: error });
|
|
623
|
+
}
|
|
624
|
+
}
|
|
625
|
+
/**
|
|
626
|
+
* Read file if exists, return null otherwise.
|
|
627
|
+
*
|
|
628
|
+
* @param filePath - Path to file
|
|
629
|
+
* @param encoding - File encoding (default: utf-8)
|
|
630
|
+
* @returns File contents or null if file doesn't exist
|
|
631
|
+
*/
|
|
632
|
+
function readFileIfExists(filePath, encoding = 'utf-8') {
|
|
633
|
+
if (!existsSync(filePath)) {
|
|
634
|
+
return null;
|
|
635
|
+
}
|
|
636
|
+
try {
|
|
637
|
+
return readFileSync(filePath, { encoding });
|
|
638
|
+
}
|
|
639
|
+
catch {
|
|
640
|
+
return null;
|
|
641
|
+
}
|
|
642
|
+
}
|
|
643
|
+
/**
|
|
644
|
+
* Read and parse JSON file if exists, return null otherwise.
|
|
645
|
+
*
|
|
646
|
+
* @param filePath - Path to JSON file
|
|
647
|
+
* @returns Parsed JSON object or null if file doesn't exist or is invalid
|
|
648
|
+
*/
|
|
649
|
+
function readJsonFileIfExists(filePath) {
|
|
650
|
+
if (!existsSync(filePath)) {
|
|
651
|
+
return null;
|
|
652
|
+
}
|
|
653
|
+
try {
|
|
654
|
+
const content = readFileSync(filePath, { encoding: 'utf-8' });
|
|
655
|
+
return parse(content);
|
|
656
|
+
}
|
|
657
|
+
catch {
|
|
658
|
+
return null;
|
|
659
|
+
}
|
|
660
|
+
}
|
|
592
661
|
|
|
593
662
|
const fsWriteLogger = createScopedLogger('project-scope:fs:write');
|
|
594
663
|
/**
|
|
@@ -694,7 +763,7 @@ function readDirectory(dirPath) {
|
|
|
694
763
|
fsDirLogger.debug('Directory read complete', { path: dirPath, entryCount: entries.length });
|
|
695
764
|
return entries.map((entry) => ({
|
|
696
765
|
name: entry.name,
|
|
697
|
-
path: join(dirPath, entry.name),
|
|
766
|
+
path: join$1(dirPath, entry.name),
|
|
698
767
|
isFile: entry.isFile(),
|
|
699
768
|
isDirectory: entry.isDirectory(),
|
|
700
769
|
isSymlink: entry.isSymbolicLink(),
|
|
@@ -710,6 +779,17 @@ function readDirectory(dirPath) {
|
|
|
710
779
|
}
|
|
711
780
|
}
|
|
712
781
|
|
|
782
|
+
/**
|
|
783
|
+
* Join path segments.
|
|
784
|
+
* Uses platform-specific separators (e.g., / or \).
|
|
785
|
+
*
|
|
786
|
+
* @param paths - Path segments to join
|
|
787
|
+
* @returns Joined path
|
|
788
|
+
*/
|
|
789
|
+
function join(...paths) {
|
|
790
|
+
return join$1(...paths);
|
|
791
|
+
}
|
|
792
|
+
|
|
713
793
|
/**
|
|
714
794
|
* Normalize path separators to forward slashes.
|
|
715
795
|
*
|
|
@@ -763,7 +843,7 @@ function relativePath(from, to) {
|
|
|
763
843
|
* @returns Joined path with normalized separators
|
|
764
844
|
*/
|
|
765
845
|
function joinPath(...segments) {
|
|
766
|
-
return normalizePath(join(...segments));
|
|
846
|
+
return normalizePath(join$1(...segments));
|
|
767
847
|
}
|
|
768
848
|
/**
|
|
769
849
|
* Check if path is absolute.
|
|
@@ -785,9 +865,223 @@ function getDirname(filePath) {
|
|
|
785
865
|
return normalizePath(dirname(filePath));
|
|
786
866
|
}
|
|
787
867
|
|
|
788
|
-
createScopedLogger('project-scope:fs:traversal');
|
|
868
|
+
const fsTraversalLogger = createScopedLogger('project-scope:fs:traversal');
|
|
869
|
+
/**
|
|
870
|
+
* Generic upward directory traversal.
|
|
871
|
+
* Name avoids similarity to fs.readdir/fs.readdirSync.
|
|
872
|
+
*
|
|
873
|
+
* @param startPath - Starting directory
|
|
874
|
+
* @param predicate - Function to test each directory
|
|
875
|
+
* @returns First matching directory or null
|
|
876
|
+
*/
|
|
877
|
+
function traverseUpward(startPath, predicate) {
|
|
878
|
+
fsTraversalLogger.debug('Starting upward traversal', { startPath });
|
|
879
|
+
let currentPath = resolve(startPath);
|
|
880
|
+
const rootPath = parse$1(currentPath).root;
|
|
881
|
+
while (currentPath !== rootPath) {
|
|
882
|
+
if (predicate(currentPath)) {
|
|
883
|
+
fsTraversalLogger.debug('Upward traversal found match', { startPath, foundPath: currentPath });
|
|
884
|
+
return currentPath;
|
|
885
|
+
}
|
|
886
|
+
currentPath = dirname(currentPath);
|
|
887
|
+
}
|
|
888
|
+
// Check root directory
|
|
889
|
+
if (predicate(rootPath)) {
|
|
890
|
+
fsTraversalLogger.debug('Upward traversal found match at root', { startPath, foundPath: rootPath });
|
|
891
|
+
return rootPath;
|
|
892
|
+
}
|
|
893
|
+
fsTraversalLogger.debug('Upward traversal found no match', { startPath });
|
|
894
|
+
return null;
|
|
895
|
+
}
|
|
896
|
+
/**
|
|
897
|
+
* Find directory containing any of the specified marker files.
|
|
898
|
+
*
|
|
899
|
+
* @param startPath - Starting directory
|
|
900
|
+
* @param markers - Array of marker file names to search for
|
|
901
|
+
* @returns First directory containing any marker, or null
|
|
902
|
+
*/
|
|
903
|
+
function locateByMarkers(startPath, markers) {
|
|
904
|
+
fsTraversalLogger.debug('Locating by markers', { startPath, markers });
|
|
905
|
+
const result = traverseUpward(startPath, (dir) => markers.some((marker) => exists(join(dir, marker))));
|
|
906
|
+
if (result) {
|
|
907
|
+
fsTraversalLogger.debug('Found directory with marker', { startPath, foundPath: result });
|
|
908
|
+
}
|
|
909
|
+
return result;
|
|
910
|
+
}
|
|
911
|
+
/**
|
|
912
|
+
* Find directory where predicate returns true, starting from given path.
|
|
913
|
+
*
|
|
914
|
+
* @param startPath - Starting directory
|
|
915
|
+
* @param test - Function to test if directory matches criteria
|
|
916
|
+
* @returns Matching directory path or null
|
|
917
|
+
*/
|
|
918
|
+
function findUpwardWhere(startPath, test) {
|
|
919
|
+
fsTraversalLogger.debug('Finding upward where condition met', { startPath });
|
|
920
|
+
return traverseUpward(startPath, test);
|
|
921
|
+
}
|
|
922
|
+
|
|
923
|
+
/**
|
|
924
|
+
* Create a structured error with code and optional context.
|
|
925
|
+
*
|
|
926
|
+
* @param message - The human-readable error message
|
|
927
|
+
* @param code - The machine-readable error code for programmatic handling
|
|
928
|
+
* @param context - Additional contextual information about the error
|
|
929
|
+
* @returns Structured error instance with code and context properties
|
|
930
|
+
*
|
|
931
|
+
* @example
|
|
932
|
+
* ```typescript
|
|
933
|
+
* import { createStructuredError } from '@hyperfrontend/project-scope'
|
|
934
|
+
*
|
|
935
|
+
* throw createStructuredError(
|
|
936
|
+
* 'Configuration file not found',
|
|
937
|
+
* 'CONFIG_NOT_FOUND',
|
|
938
|
+
* { path: './config.json', searched: ['./config.json', './settings.json'] }
|
|
939
|
+
* )
|
|
940
|
+
* ```
|
|
941
|
+
*/
|
|
942
|
+
function createStructuredError(message, code, context) {
|
|
943
|
+
const error = createError(message);
|
|
944
|
+
error.code = code;
|
|
945
|
+
error.context = context ?? {};
|
|
946
|
+
return error;
|
|
947
|
+
}
|
|
948
|
+
/**
|
|
949
|
+
* Create a configuration-related error.
|
|
950
|
+
*
|
|
951
|
+
* @param message - The human-readable error message
|
|
952
|
+
* @param code - The machine-readable error code for programmatic handling
|
|
953
|
+
* @param context - Additional contextual information (e.g., file path, config key)
|
|
954
|
+
* @returns Structured error instance tagged with type 'config'
|
|
955
|
+
*/
|
|
956
|
+
function createConfigError(message, code, context) {
|
|
957
|
+
return createStructuredError(message, code, { ...context, type: 'config' });
|
|
958
|
+
}
|
|
789
959
|
|
|
790
|
-
createScopedLogger('project-scope:project:package');
|
|
960
|
+
const packageLogger = createScopedLogger('project-scope:project:package');
|
|
961
|
+
/**
|
|
962
|
+
* Verifies that a value is an object with only string values,
|
|
963
|
+
* used for validating dependency maps and script definitions.
|
|
964
|
+
*
|
|
965
|
+
* @param value - Value to check
|
|
966
|
+
* @returns True if value is a record of strings
|
|
967
|
+
*/
|
|
968
|
+
function isStringRecord(value) {
|
|
969
|
+
if (typeof value !== 'object' || value === null)
|
|
970
|
+
return false;
|
|
971
|
+
return values(value).every((v) => typeof v === 'string');
|
|
972
|
+
}
|
|
973
|
+
/**
|
|
974
|
+
* Extracts and normalizes the workspaces field from package.json,
|
|
975
|
+
* supporting both array format and object with packages array.
|
|
976
|
+
*
|
|
977
|
+
* @param value - Raw workspaces value from package.json
|
|
978
|
+
* @returns Normalized workspace patterns or undefined if invalid
|
|
979
|
+
*/
|
|
980
|
+
function parseWorkspaces(value) {
|
|
981
|
+
if (isArray(value) && value.every((v) => typeof v === 'string')) {
|
|
982
|
+
return value;
|
|
983
|
+
}
|
|
984
|
+
if (typeof value === 'object' && value !== null) {
|
|
985
|
+
const obj = value;
|
|
986
|
+
if (isArray(obj['packages'])) {
|
|
987
|
+
return { packages: obj['packages'] };
|
|
988
|
+
}
|
|
989
|
+
}
|
|
990
|
+
return undefined;
|
|
991
|
+
}
|
|
992
|
+
/**
|
|
993
|
+
* Validate and normalize package.json data.
|
|
994
|
+
*
|
|
995
|
+
* @param data - Raw parsed data
|
|
996
|
+
* @returns Validated package.json
|
|
997
|
+
*/
|
|
998
|
+
function validatePackageJson(data) {
|
|
999
|
+
if (typeof data !== 'object' || data === null) {
|
|
1000
|
+
throw createError('package.json must be an object');
|
|
1001
|
+
}
|
|
1002
|
+
const pkg = data;
|
|
1003
|
+
return {
|
|
1004
|
+
name: typeof pkg['name'] === 'string' ? pkg['name'] : undefined,
|
|
1005
|
+
version: typeof pkg['version'] === 'string' ? pkg['version'] : undefined,
|
|
1006
|
+
description: typeof pkg['description'] === 'string' ? pkg['description'] : undefined,
|
|
1007
|
+
main: typeof pkg['main'] === 'string' ? pkg['main'] : undefined,
|
|
1008
|
+
module: typeof pkg['module'] === 'string' ? pkg['module'] : undefined,
|
|
1009
|
+
browser: typeof pkg['browser'] === 'string' ? pkg['browser'] : undefined,
|
|
1010
|
+
types: typeof pkg['types'] === 'string' ? pkg['types'] : undefined,
|
|
1011
|
+
bin: typeof pkg['bin'] === 'string' || isStringRecord(pkg['bin']) ? pkg['bin'] : undefined,
|
|
1012
|
+
scripts: isStringRecord(pkg['scripts']) ? pkg['scripts'] : undefined,
|
|
1013
|
+
dependencies: isStringRecord(pkg['dependencies']) ? pkg['dependencies'] : undefined,
|
|
1014
|
+
devDependencies: isStringRecord(pkg['devDependencies']) ? pkg['devDependencies'] : undefined,
|
|
1015
|
+
peerDependencies: isStringRecord(pkg['peerDependencies']) ? pkg['peerDependencies'] : undefined,
|
|
1016
|
+
optionalDependencies: isStringRecord(pkg['optionalDependencies']) ? pkg['optionalDependencies'] : undefined,
|
|
1017
|
+
workspaces: parseWorkspaces(pkg['workspaces']),
|
|
1018
|
+
exports: typeof pkg['exports'] === 'object' ? pkg['exports'] : undefined,
|
|
1019
|
+
engines: isStringRecord(pkg['engines']) ? pkg['engines'] : undefined,
|
|
1020
|
+
...pkg,
|
|
1021
|
+
};
|
|
1022
|
+
}
|
|
1023
|
+
/**
|
|
1024
|
+
* Reads and parses package.json from a directory, validating
|
|
1025
|
+
* the structure and normalizing fields to the PackageJson interface.
|
|
1026
|
+
*
|
|
1027
|
+
* @param projectPath - Project directory path or path to package.json
|
|
1028
|
+
* @returns Parsed package.json
|
|
1029
|
+
* @throws {Error} Error if file doesn't exist or is invalid
|
|
1030
|
+
*/
|
|
1031
|
+
function readPackageJson(projectPath) {
|
|
1032
|
+
const packageJsonPath = projectPath.endsWith('package.json') ? projectPath : join$1(projectPath, 'package.json');
|
|
1033
|
+
packageLogger.debug('Reading package.json', { path: packageJsonPath });
|
|
1034
|
+
const content = readFileContent(packageJsonPath);
|
|
1035
|
+
try {
|
|
1036
|
+
const data = parse(content);
|
|
1037
|
+
const validated = validatePackageJson(data);
|
|
1038
|
+
packageLogger.debug('Package.json read successfully', { path: packageJsonPath, name: validated.name });
|
|
1039
|
+
return validated;
|
|
1040
|
+
}
|
|
1041
|
+
catch (error) {
|
|
1042
|
+
packageLogger.warn('Failed to parse package.json', {
|
|
1043
|
+
path: packageJsonPath,
|
|
1044
|
+
error: error instanceof Error ? error.message : String(error),
|
|
1045
|
+
});
|
|
1046
|
+
throw createConfigError(`Failed to parse package.json: ${packageJsonPath}`, 'CONFIG_PARSE_ERROR', {
|
|
1047
|
+
filePath: packageJsonPath,
|
|
1048
|
+
cause: error,
|
|
1049
|
+
});
|
|
1050
|
+
}
|
|
1051
|
+
}
|
|
1052
|
+
/**
|
|
1053
|
+
* Attempts to read and parse package.json if it exists,
|
|
1054
|
+
* returning null on missing file or parse failure.
|
|
1055
|
+
*
|
|
1056
|
+
* @param projectPath - Project directory path or path to package.json
|
|
1057
|
+
* @returns Parsed package.json or null if not found
|
|
1058
|
+
*/
|
|
1059
|
+
function readPackageJsonIfExists(projectPath) {
|
|
1060
|
+
const packageJsonPath = projectPath.endsWith('package.json') ? projectPath : join$1(projectPath, 'package.json');
|
|
1061
|
+
const content = readFileIfExists(packageJsonPath);
|
|
1062
|
+
if (!content) {
|
|
1063
|
+
packageLogger.debug('Package.json not found', { path: packageJsonPath });
|
|
1064
|
+
return null;
|
|
1065
|
+
}
|
|
1066
|
+
try {
|
|
1067
|
+
const validated = validatePackageJson(parse(content));
|
|
1068
|
+
packageLogger.debug('Package.json loaded', { path: packageJsonPath, name: validated.name });
|
|
1069
|
+
return validated;
|
|
1070
|
+
}
|
|
1071
|
+
catch {
|
|
1072
|
+
packageLogger.debug('Failed to parse package.json, returning null', { path: packageJsonPath });
|
|
1073
|
+
return null;
|
|
1074
|
+
}
|
|
1075
|
+
}
|
|
1076
|
+
/**
|
|
1077
|
+
* Find nearest package.json by walking up the directory tree.
|
|
1078
|
+
*
|
|
1079
|
+
* @param startPath - Starting path
|
|
1080
|
+
* @returns Path to directory containing package.json, or null if not found
|
|
1081
|
+
*/
|
|
1082
|
+
function findNearestPackageJson(startPath) {
|
|
1083
|
+
return locateByMarkers(startPath, ['package.json']);
|
|
1084
|
+
}
|
|
791
1085
|
|
|
792
1086
|
createScopedLogger('project-scope:heuristics:deps');
|
|
793
1087
|
|
|
@@ -923,9 +1217,410 @@ function createCache$1(options) {
|
|
|
923
1217
|
return freeze(cache);
|
|
924
1218
|
}
|
|
925
1219
|
|
|
926
|
-
|
|
1220
|
+
/**
|
|
1221
|
+
* Pattern matching utilities with ReDoS protection.
|
|
1222
|
+
* Uses character-by-character matching instead of regex where possible.
|
|
1223
|
+
*/
|
|
1224
|
+
/**
|
|
1225
|
+
* Match path against glob pattern using safe character iteration.
|
|
1226
|
+
* Avoids regex to prevent ReDoS attacks.
|
|
1227
|
+
*
|
|
1228
|
+
* Supported patterns:
|
|
1229
|
+
* - * matches any characters except /
|
|
1230
|
+
* - ** matches any characters including /
|
|
1231
|
+
* - ? matches exactly one character except /
|
|
1232
|
+
* - {a,b,c} matches any of the alternatives
|
|
1233
|
+
*
|
|
1234
|
+
* @param path - The filesystem path to test against the pattern
|
|
1235
|
+
* @param pattern - The glob pattern to match against
|
|
1236
|
+
* @returns True if path matches pattern
|
|
1237
|
+
*
|
|
1238
|
+
* @example
|
|
1239
|
+
* ```typescript
|
|
1240
|
+
* import { matchGlobPattern } from '@hyperfrontend/project-scope'
|
|
1241
|
+
*
|
|
1242
|
+
* matchGlobPattern('src/utils/helper.ts', '\*\*\/*.ts') // true
|
|
1243
|
+
* matchGlobPattern('test.spec.ts', '\*.spec.ts') // true
|
|
1244
|
+
* matchGlobPattern('config.json', '\*.{json,yaml}') // true
|
|
1245
|
+
* matchGlobPattern('src/index.ts', 'src/\*.ts') // true
|
|
1246
|
+
* ```
|
|
1247
|
+
*/
|
|
1248
|
+
function matchGlobPattern(path, pattern) {
|
|
1249
|
+
return matchSegments(path.split('/'), pattern.split('/'), 0, 0);
|
|
1250
|
+
}
|
|
1251
|
+
/**
|
|
1252
|
+
* Internal recursive function to match path segments against pattern segments.
|
|
1253
|
+
*
|
|
1254
|
+
* @param pathParts - Array of path segments split by '/'
|
|
1255
|
+
* @param patternParts - Array of pattern segments split by '/'
|
|
1256
|
+
* @param pathIdx - Current index in pathParts being examined
|
|
1257
|
+
* @param patternIdx - Current index in patternParts being examined
|
|
1258
|
+
* @returns True if remaining segments match
|
|
1259
|
+
*/
|
|
1260
|
+
function matchSegments(pathParts, patternParts, pathIdx, patternIdx) {
|
|
1261
|
+
// Base cases
|
|
1262
|
+
if (pathIdx === pathParts.length && patternIdx === patternParts.length) {
|
|
1263
|
+
return true; // Both exhausted = match
|
|
1264
|
+
}
|
|
1265
|
+
if (patternIdx >= patternParts.length) {
|
|
1266
|
+
return false; // Pattern exhausted but path remains
|
|
1267
|
+
}
|
|
1268
|
+
const patternPart = patternParts[patternIdx];
|
|
1269
|
+
// Handle ** (globstar) - matches zero or more directories
|
|
1270
|
+
if (patternPart === '**') {
|
|
1271
|
+
// Try matching rest of pattern against current position and all future positions
|
|
1272
|
+
for (let i = pathIdx; i <= pathParts.length; i++) {
|
|
1273
|
+
if (matchSegments(pathParts, patternParts, i, patternIdx + 1)) {
|
|
1274
|
+
return true;
|
|
1275
|
+
}
|
|
1276
|
+
}
|
|
1277
|
+
return false;
|
|
1278
|
+
}
|
|
1279
|
+
if (pathIdx >= pathParts.length) {
|
|
1280
|
+
return false; // Path exhausted but pattern remains (and it's not **)
|
|
1281
|
+
}
|
|
1282
|
+
const pathPart = pathParts[pathIdx];
|
|
1283
|
+
// Match current segment
|
|
1284
|
+
if (matchSegment(pathPart, patternPart)) {
|
|
1285
|
+
return matchSegments(pathParts, patternParts, pathIdx + 1, patternIdx + 1);
|
|
1286
|
+
}
|
|
1287
|
+
return false;
|
|
1288
|
+
}
|
|
1289
|
+
/**
|
|
1290
|
+
* Match a single path segment against a pattern segment.
|
|
1291
|
+
* Handles *, ?, and {a,b,c} patterns.
|
|
1292
|
+
*
|
|
1293
|
+
* @param text - The path segment text to match
|
|
1294
|
+
* @param pattern - The pattern segment to match against
|
|
1295
|
+
* @returns True if the text matches the pattern
|
|
1296
|
+
*/
|
|
1297
|
+
function matchSegment(text, pattern) {
|
|
1298
|
+
let textIdx = 0;
|
|
1299
|
+
let patternIdx = 0;
|
|
1300
|
+
while (patternIdx < pattern.length) {
|
|
1301
|
+
const char = pattern[patternIdx];
|
|
1302
|
+
if (char === '*') {
|
|
1303
|
+
// * matches zero or more characters
|
|
1304
|
+
patternIdx++;
|
|
1305
|
+
if (patternIdx === pattern.length) {
|
|
1306
|
+
return true; // * at end matches rest of string
|
|
1307
|
+
}
|
|
1308
|
+
// Try matching rest of pattern at each position in text
|
|
1309
|
+
for (let i = textIdx; i <= text.length; i++) {
|
|
1310
|
+
if (matchSegmentFrom(text, i, pattern, patternIdx)) {
|
|
1311
|
+
return true;
|
|
1312
|
+
}
|
|
1313
|
+
}
|
|
1314
|
+
return false;
|
|
1315
|
+
}
|
|
1316
|
+
else if (char === '?') {
|
|
1317
|
+
// ? matches exactly one character
|
|
1318
|
+
if (textIdx >= text.length) {
|
|
1319
|
+
return false;
|
|
1320
|
+
}
|
|
1321
|
+
textIdx++;
|
|
1322
|
+
patternIdx++;
|
|
1323
|
+
}
|
|
1324
|
+
else if (char === '{') {
|
|
1325
|
+
// {a,b,c} matches any alternative
|
|
1326
|
+
const closeIdx = findClosingBrace(pattern, patternIdx);
|
|
1327
|
+
if (closeIdx === -1) {
|
|
1328
|
+
// Unmatched brace, treat as literal
|
|
1329
|
+
if (textIdx >= text.length || text[textIdx] !== char) {
|
|
1330
|
+
return false;
|
|
1331
|
+
}
|
|
1332
|
+
textIdx++;
|
|
1333
|
+
patternIdx++;
|
|
1334
|
+
}
|
|
1335
|
+
else {
|
|
1336
|
+
const alternatives = extractAlternatives(pattern.slice(patternIdx + 1, closeIdx));
|
|
1337
|
+
for (const alt of alternatives) {
|
|
1338
|
+
if (matchSegmentFrom(text, textIdx, text.slice(0, textIdx) + alt + pattern.slice(closeIdx + 1), textIdx)) {
|
|
1339
|
+
return true;
|
|
1340
|
+
}
|
|
1341
|
+
}
|
|
1342
|
+
return false;
|
|
1343
|
+
}
|
|
1344
|
+
}
|
|
1345
|
+
else {
|
|
1346
|
+
// Literal character
|
|
1347
|
+
if (textIdx >= text.length || text[textIdx] !== char) {
|
|
1348
|
+
return false;
|
|
1349
|
+
}
|
|
1350
|
+
textIdx++;
|
|
1351
|
+
patternIdx++;
|
|
1352
|
+
}
|
|
1353
|
+
}
|
|
1354
|
+
return textIdx === text.length;
|
|
1355
|
+
}
|
|
1356
|
+
/**
|
|
1357
|
+
* Helper to match from a specific position.
|
|
1358
|
+
*
|
|
1359
|
+
* @param text - The full text being matched
|
|
1360
|
+
* @param textIdx - The starting index in text to match from
|
|
1361
|
+
* @param pattern - The full pattern being matched
|
|
1362
|
+
* @param patternIdx - The starting index in pattern to match from
|
|
1363
|
+
* @returns True if the text matches the pattern from the given positions
|
|
1364
|
+
*/
|
|
1365
|
+
function matchSegmentFrom(text, textIdx, pattern, patternIdx) {
|
|
1366
|
+
const remainingText = text.slice(textIdx);
|
|
1367
|
+
const remainingPattern = pattern.slice(patternIdx);
|
|
1368
|
+
return matchSegment(remainingText, remainingPattern);
|
|
1369
|
+
}
|
|
1370
|
+
/**
|
|
1371
|
+
* Find closing brace for {a,b,c} pattern.
|
|
1372
|
+
*
|
|
1373
|
+
* @param pattern - The pattern string to search within
|
|
1374
|
+
* @param startIdx - The index of the opening brace
|
|
1375
|
+
* @returns The index of the matching closing brace, or -1 if not found
|
|
1376
|
+
*/
|
|
1377
|
+
function findClosingBrace(pattern, startIdx) {
|
|
1378
|
+
let depth = 0;
|
|
1379
|
+
for (let i = startIdx; i < pattern.length; i++) {
|
|
1380
|
+
if (pattern[i] === '{') {
|
|
1381
|
+
depth++;
|
|
1382
|
+
}
|
|
1383
|
+
else if (pattern[i] === '}') {
|
|
1384
|
+
depth--;
|
|
1385
|
+
if (depth === 0) {
|
|
1386
|
+
return i;
|
|
1387
|
+
}
|
|
1388
|
+
}
|
|
1389
|
+
}
|
|
1390
|
+
return -1;
|
|
1391
|
+
}
|
|
1392
|
+
/**
 * Splits the content of a {a,b,c} alternation group into its alternatives.
 * Only commas at nesting depth 0 act as separators; commas inside nested
 * brace groups are kept verbatim.
 *
 * @param content - The content between braces (without the braces themselves)
 * @returns Array of alternative strings split by commas at depth 0
 */
function extractAlternatives(content) {
    const parts = [];
    let buffer = '';
    let nesting = 0;
    for (const ch of content) {
        if (ch === ',' && nesting === 0) {
            // Top-level separator: close out the current alternative.
            parts.push(buffer);
            buffer = '';
            continue;
        }
        if (ch === '{') {
            nesting += 1;
        }
        else if (ch === '}') {
            nesting -= 1;
        }
        buffer += ch;
    }
    // A trailing empty alternative (e.g. "a,") is intentionally dropped.
    if (buffer) {
        parts.push(buffer);
    }
    return parts;
}
|
|
927
1425
|
|
|
928
|
-
createScopedLogger('project-scope:project:
|
|
1426
|
+
const walkLogger = createScopedLogger('project-scope:project:walk');
|
|
1427
|
+
/**
 * Reads the .gitignore file in the given directory and returns its
 * non-empty, non-comment lines as patterns for traversal filtering.
 *
 * @param startPath - Directory containing the .gitignore file
 * @returns Array of gitignore patterns
 */
function loadGitignorePatterns(startPath) {
    const content = readFileIfExists(join$1(startPath, '.gitignore'));
    if (!content) {
        return [];
    }
    return content
        .split('\n')
        .map((line) => line.trim())
        .filter((line) => line !== '' && !line.startsWith('#'));
}
|
|
1449
|
+
/**
 * Evaluates whether a relative path should be ignored based on
 * a list of gitignore-style patterns.
 *
 * @param relativePath - Path relative to the root directory
 * @param patterns - Array of gitignore-style patterns to test
 * @returns True if the path matches any ignore pattern
 */
function matchesIgnorePattern(relativePath, patterns) {
    return patterns.some((pattern) => matchPattern(relativePath, pattern));
}
|
|
1465
|
+
/**
 * Tests if the given path matches a gitignore-style pattern,
 * supporting negation patterns with '!' prefix. The pattern is tried
 * against the whole path, against the path prefixed with '**' (so bare
 * names match at any depth), and against each individual path segment.
 * Uses safe character-by-character matching to prevent ReDoS attacks.
 *
 * @param path - File or directory path to test
 * @param pattern - Gitignore-style pattern (may include wildcards)
 * @returns True if the path matches the pattern (or doesn't match if negated)
 */
function matchPattern(path, pattern) {
    const withoutLeadingSlash = pattern.startsWith('/') ? pattern.slice(1) : pattern;
    const negated = withoutLeadingSlash.startsWith('!');
    const glob = negated ? withoutLeadingSlash.slice(1) : withoutLeadingSlash;
    const fullPathHit = matchGlobPattern(path, glob) || matchGlobPattern(path, `**/${glob}`);
    const segmentHit = path.split('/').some((segment) => matchGlobPattern(segment, glob));
    const hit = fullPathHit || segmentHit;
    // Negation inverts the verdict, mirroring gitignore '!' semantics.
    return negated ? !hit : hit;
}
|
|
1483
|
+
/**
 * Traverses a directory tree synchronously, calling a visitor function
 * for each file and directory encountered. Supports depth limiting,
 * hidden file filtering, and gitignore pattern matching.
 *
 * The visitor may return 'stop' to abort the entire walk, 'skip' to not
 * descend into the current entry, or anything else to continue.
 *
 * @param startPath - Root directory to begin traversal
 * @param visitor - Callback function invoked for each file system entry
 * @param options - Configuration for traversal behavior
 */
function walkDirectory(startPath, visitor, options) {
    walkLogger.debug('Starting directory walk', {
        startPath,
        maxDepth: options?.maxDepth ?? -1,
        includeHidden: options?.includeHidden ?? false,
        respectGitignore: options?.respectGitignore ?? true,
        ignorePatterns: options?.ignorePatterns?.length ?? 0,
    });
    // -1 means "no depth limit".
    const maxDepth = options?.maxDepth ?? -1;
    const includeHidden = options?.includeHidden ?? false;
    const ignorePatterns = options?.ignorePatterns ?? [];
    const respectGitignore = options?.respectGitignore ?? true;
    // Only the root .gitignore is loaded here; nested .gitignore files are not consulted.
    const gitignorePatterns = respectGitignore ? loadGitignorePatterns(startPath) : [];
    const allIgnorePatterns = [...ignorePatterns, ...gitignorePatterns];
    if (gitignorePatterns.length > 0) {
        walkLogger.debug('Loaded gitignore patterns', { count: gitignorePatterns.length });
    }
    /**
     * Recursively walks directory entries, applying visitor to each.
     *
     * @param currentPath - Absolute path to current directory
     * @param relativePath - Path relative to the starting directory
     * @param depth - Current recursion depth
     * @returns False to stop walking, true to continue
     */
    function walk(currentPath, relativePath, depth) {
        if (maxDepth !== -1 && depth > maxDepth) {
            return true;
        }
        let entries;
        try {
            entries = readDirectory(currentPath);
        }
        catch {
            // Unreadable directory (permissions, race): skip it but keep walking siblings.
            return true;
        }
        for (const entry of entries) {
            // Dot-prefixed entries are treated as hidden unless explicitly included.
            if (!includeHidden && entry.name.startsWith('.')) {
                continue;
            }
            const entryRelativePath = relativePath ? `${relativePath}/${entry.name}` : entry.name;
            if (matchesIgnorePattern(entryRelativePath, allIgnorePatterns)) {
                continue;
            }
            const walkEntry = {
                name: entry.name,
                path: entry.path,
                relativePath: entryRelativePath,
                isFile: entry.isFile,
                isDirectory: entry.isDirectory,
                isSymlink: entry.isSymlink,
                depth,
            };
            const result = visitor(walkEntry);
            if (result === 'stop') {
                // Propagate the stop request all the way up the recursion.
                return false;
            }
            if (result === 'skip') {
                // Do not descend into this entry; continue with its siblings.
                continue;
            }
            if (entry.isDirectory) {
                const shouldContinue = walk(entry.path, entryRelativePath, depth + 1);
                if (!shouldContinue) {
                    return false;
                }
            }
        }
        return true;
    }
    walk(startPath, '', 0);
    walkLogger.debug('Directory walk complete', { startPath });
}
|
|
1564
|
+
|
|
1565
|
+
const searchLogger = createScopedLogger('project-scope:project:search');
|
|
1566
|
+
/**
 * Tests if a path matches at least one pattern from an array of globs,
 * enabling flexible multi-pattern file filtering.
 * Uses safe character-by-character matching to prevent ReDoS attacks.
 *
 * @param path - File path to test
 * @param patterns - Array of glob patterns
 * @returns True if path matches any pattern
 */
function matchesPatterns(path, patterns) {
    for (const candidate of patterns) {
        if (matchGlobPattern(path, candidate)) {
            return true;
        }
    }
    return false;
}
|
|
1578
|
+
/**
 * Searches a directory tree for files matching one or more glob patterns,
 * returning relative or absolute paths based on options.
 *
 * @param startPath - Root directory to begin the search
 * @param patterns - Glob patterns (e.g., '*.ts', '**\/*.json') to filter files
 * @param options - Configuration for search behavior
 * @returns List of relative file paths that match the patterns
 *
 * @example
 * ```typescript
 * import { findFiles } from '@hyperfrontend/project-scope'
 *
 * // Find all TypeScript files
 * const tsFiles = findFiles('./src', '\*\*\/*.ts')
 *
 * // Find multiple file types
 * const configFiles = findFiles('./', ['\*.json', '\*.yaml', '\*.yml'])
 *
 * // Limit results and get absolute paths
 * const first10 = findFiles('./src', '\*\*\/*.ts', {
 *   maxResults: 10,
 *   absolutePaths: true
 * })
 * ```
 */
function findFiles(startPath, patterns, options) {
    // Accept a single pattern or an array of patterns uniformly.
    const patternList = isArray(patterns) ? patterns : [patterns];
    searchLogger.debug('Finding files', { startPath, patterns: patternList, maxResults: options?.maxResults });
    const limit = options?.maxResults ?? Infinity;
    const matched = [];
    const visit = (entry) => {
        if (matched.length >= limit) {
            // Enough matches collected: abort the walk early.
            return 'stop';
        }
        if (entry.isFile && matchesPatterns(entry.relativePath, patternList)) {
            matched.push(options?.absolutePaths ? entry.path : entry.relativePath);
        }
        return undefined;
    };
    walkDirectory(startPath, visit, options);
    searchLogger.debug('File search complete', { startPath, matchCount: matched.length });
    return matched;
}
|
|
929
1624
|
|
|
930
1625
|
createScopedLogger('project-scope:heuristics:entry-points');
|
|
931
1626
|
/**
|
|
@@ -936,20 +1631,341 @@ createCache$1({ ttl: 60000, maxSize: 50 });
|
|
|
936
1631
|
|
|
937
1632
|
createScopedLogger('project-scope:tech');
|
|
938
1633
|
/**
|
|
939
|
-
* Cache for tech detection results.
|
|
940
|
-
* TTL: 60 seconds (tech stack can change during active development)
|
|
1634
|
+
* Cache for tech detection results.
|
|
1635
|
+
* TTL: 60 seconds (tech stack can change during active development)
|
|
1636
|
+
*/
|
|
1637
|
+
createCache$1({ ttl: 60000, maxSize: 50 });
|
|
1638
|
+
|
|
1639
|
+
createScopedLogger('project-scope:heuristics:project-type');
|
|
1640
|
+
|
|
1641
|
+
const rootLogger = createScopedLogger('project-scope:root');
|
|
1642
|
+
/**
|
|
1643
|
+
* Files indicating workspace/monorepo root.
|
|
1644
|
+
*/
|
|
1645
|
+
const WORKSPACE_MARKERS = ['nx.json', 'turbo.json', 'lerna.json', 'pnpm-workspace.yaml', 'rush.json'];
|
|
1646
|
+
/**
 * Find workspace root (monorepo root).
 *
 * Resolution order: explicit workspace marker files (nx.json, turbo.json,
 * ...), then the nearest ancestor whose package.json declares "workspaces",
 * then the nearest package.json at all.
 *
 * @param startPath - Starting path
 * @returns Workspace root path or null
 *
 * @example
 * ```typescript
 * import { findWorkspaceRoot } from '@hyperfrontend/project-scope'
 *
 * const root = findWorkspaceRoot('./libs/my-lib')
 * if (root) {
 *   console.log('Monorepo root:', root) // e.g., '/home/user/my-monorepo'
 * }
 * ```
 */
function findWorkspaceRoot(startPath) {
    rootLogger.debug('Finding workspace root', { startPath });
    const markerRoot = locateByMarkers(startPath, WORKSPACE_MARKERS);
    if (markerRoot) {
        rootLogger.debug('Found workspace root by marker', { root: markerRoot });
        return markerRoot;
    }
    const workspacesRoot = findUpwardWhere(startPath, (dir) => {
        return readPackageJsonIfExists(dir)?.workspaces !== undefined;
    });
    if (workspacesRoot) {
        rootLogger.debug('Found workspace root by workspaces field', { root: workspacesRoot });
        return workspacesRoot;
    }
    // Last resort: the nearest package.json (may be null).
    const packageRoot = findNearestPackageJson(startPath);
    if (packageRoot) {
        rootLogger.debug('Found workspace root by package.json', { root: packageRoot });
    }
    else {
        rootLogger.debug('Workspace root not found');
    }
    return packageRoot;
}
|
|
1687
|
+
|
|
1688
|
+
const nxLogger = createScopedLogger('project-scope:nx');
|
|
1689
|
+
/**
|
|
1690
|
+
* Files indicating NX workspace root.
|
|
1691
|
+
*/
|
|
1692
|
+
const NX_CONFIG_FILES = ['nx.json', 'workspace.json'];
|
|
1693
|
+
/**
|
|
1694
|
+
* NX-specific project file.
|
|
1695
|
+
*/
|
|
1696
|
+
const NX_PROJECT_FILE = 'project.json';
|
|
1697
|
+
/**
 * Check if directory is an NX workspace root.
 *
 * @param path - Directory path to check
 * @returns True if the directory contains nx.json or workspace.json
 *
 * @example
 * ```typescript
 * import { isNxWorkspace } from '@hyperfrontend/project-scope'
 *
 * if (isNxWorkspace('./my-project')) {
 *   console.log('This is an NX monorepo')
 * }
 * ```
 */
function isNxWorkspace(path) {
    // First config file found on disk wins; exists() is probed lazily.
    const found = NX_CONFIG_FILES.find((configFile) => exists(join$1(path, configFile)));
    if (found !== undefined) {
        nxLogger.debug('NX workspace detected', { path, configFile: found });
        return true;
    }
    nxLogger.debug('Not an NX workspace', { path });
    return false;
}
|
|
1722
|
+
/**
 * Check if directory is an NX project.
 *
 * @param path - Directory path to check
 * @returns True if the directory contains project.json
 */
function isNxProject(path) {
    const hasProjectJson = exists(join$1(path, NX_PROJECT_FILE));
    nxLogger.debug('NX project check', { path, isProject: hasProjectJson });
    return hasProjectJson;
}
|
|
1733
|
+
/**
 * Detect NX version from package.json dependencies.
 * devDependencies takes precedence over dependencies.
 *
 * @param workspacePath - Workspace root path
 * @returns NX version string (without semver range) or null
 */
function detectNxVersion(workspacePath) {
    const packageJson = readPackageJsonIfExists(workspacePath);
    if (!packageJson) {
        return null;
    }
    const declared = packageJson.devDependencies?.['nx'] ?? packageJson.dependencies?.['nx'];
    if (!declared) {
        return null;
    }
    // Strip semver range characters (^, ~, >=, etc.)
    return declared.replace(/^[\^~>=<]+/, '');
}
|
|
1750
|
+
/**
 * Check if workspace is integrated (not standalone).
 * Integrated repos typically have workspaceLayout, namedInputs, or targetDefaults.
 *
 * @param nxJson - Parsed nx.json configuration
 * @returns True if the workspace is integrated
 */
function isIntegratedRepo(nxJson) {
    const integrationMarkers = [nxJson.workspaceLayout, nxJson.namedInputs, nxJson.targetDefaults];
    return integrationMarkers.some((marker) => marker !== undefined);
}
|
|
1760
|
+
/**
 * Get comprehensive NX workspace information.
 *
 * Reads nx.json, falling back to the legacy workspace.json format, and
 * combines it with the detected NX version and workspace layout defaults.
 *
 * @param workspacePath - Workspace root path
 * @returns Workspace info or null if not an NX workspace
 */
function getNxWorkspaceInfo(workspacePath) {
    nxLogger.debug('Getting NX workspace info', { workspacePath });
    if (!isNxWorkspace(workspacePath)) {
        return null;
    }
    const nxJson = readJsonFileIfExists(join$1(workspacePath, 'nx.json'));
    if (!nxJson) {
        // Check for workspace.json as fallback (older NX)
        const workspaceJson = readJsonFileIfExists(join$1(workspacePath, 'workspace.json'));
        if (!workspaceJson) {
            nxLogger.debug('No nx.json or workspace.json found', { workspacePath });
            return null;
        }
        nxLogger.debug('Using legacy workspace.json', { workspacePath });
        // Create minimal nx.json from workspace.json; legacy workspaces are
        // reported as integrated with the conventional apps/libs layout.
        return {
            root: workspacePath,
            version: detectNxVersion(workspacePath),
            nxJson: {},
            isIntegrated: true,
            workspaceLayout: {
                appsDir: 'apps',
                libsDir: 'libs',
            },
        };
    }
    const info = {
        root: workspacePath,
        version: detectNxVersion(workspacePath),
        nxJson,
        isIntegrated: isIntegratedRepo(nxJson),
        defaultProject: nxJson.defaultProject,
        workspaceLayout: {
            // Fall back to NX's conventional directory names when unset.
            appsDir: nxJson.workspaceLayout?.appsDir ?? 'apps',
            libsDir: nxJson.workspaceLayout?.libsDir ?? 'libs',
        },
    };
    nxLogger.debug('NX workspace info retrieved', {
        workspacePath,
        version: info.version,
        isIntegrated: info.isIntegrated,
        defaultProject: info.defaultProject,
    });
    return info;
}
|
|
1811
|
+
|
|
1812
|
+
createScopedLogger('project-scope:nx:devkit');
|
|
1813
|
+
|
|
1814
|
+
const nxConfigLogger = createScopedLogger('project-scope:nx:config');
|
|
1815
|
+
/**
 * Read project.json for an NX project.
 *
 * @param projectPath - Project directory path
 * @returns Parsed project.json or null if not found
 */
function readProjectJson(projectPath) {
    const configPath = join$1(projectPath, NX_PROJECT_FILE);
    nxConfigLogger.debug('Reading project.json', { path: configPath });
    const parsed = readJsonFileIfExists(configPath);
    if (parsed) {
        nxConfigLogger.debug('Project.json loaded', { path: configPath, name: parsed.name });
        return parsed;
    }
    nxConfigLogger.debug('Project.json not found', { path: configPath });
    return parsed;
}
|
|
1833
|
+
/**
 * Get project configuration from project.json or package.json nx field.
 *
 * @param projectPath - Project directory path
 * @param workspacePath - Workspace root path (for relative path calculation)
 * @returns Project configuration or null if not found
 */
function getProjectConfig(projectPath, workspacePath) {
    nxConfigLogger.debug('Getting project config', { projectPath, workspacePath });
    // Try project.json first
    const projectJson = readProjectJson(projectPath);
    if (projectJson) {
        nxConfigLogger.debug('Using project.json config', { projectPath, name: projectJson.name });
        return {
            ...projectJson,
            // project.json may omit "root"; derive it from the directory layout.
            root: projectJson.root ?? relative(workspacePath, projectPath),
        };
    }
    // Try to infer from package.json nx field.
    // Note: typeof null === 'object', so explicitly exclude "nx": null —
    // otherwise a null field would be treated as an (empty) inline config
    // and register a phantom project.
    const packageJson = readPackageJsonIfExists(projectPath);
    if (packageJson && typeof packageJson['nx'] === 'object' && packageJson['nx'] !== null) {
        nxConfigLogger.debug('Using package.json nx field', { projectPath, name: packageJson.name });
        const nxConfig = packageJson['nx'];
        return {
            name: packageJson.name,
            root: relative(workspacePath, projectPath),
            ...nxConfig,
        };
    }
    nxConfigLogger.debug('No project config found', { projectPath });
    return null;
}
|
|
1865
|
+
/**
 * Recursively scan directory for project.json files.
 *
 * @param dirPath - Directory to scan
 * @param workspacePath - Workspace root path
 * @param projects - Map to add discovered projects to
 * @param maxDepth - Maximum recursion depth
 * @param currentDepth - Current recursion depth
 */
function scanForProjects(dirPath, workspacePath, projects, maxDepth, currentDepth = 0) {
    if (currentDepth > maxDepth) {
        return;
    }
    try {
        for (const dirEntry of readDirectory(dirPath)) {
            // Skip hidden directories plus dependency/build output.
            if (dirEntry.name.startsWith('.') || dirEntry.name === 'node_modules' || dirEntry.name === 'dist') {
                continue;
            }
            const childPath = join$1(dirPath, dirEntry.name);
            if (!dirEntry.isDirectory) {
                continue;
            }
            if (isNxProject(childPath)) {
                const config = getProjectConfig(childPath, workspacePath);
                if (config) {
                    // Fall back to a path-derived name when project.json has none.
                    const projectName = config.name || relative(workspacePath, childPath).replace(/[\\/]/g, '-');
                    projects.set(projectName, {
                        ...config,
                        name: projectName,
                        root: relative(workspacePath, childPath),
                    });
                }
            }
            // Descend regardless of whether this directory was itself a project.
            scanForProjects(childPath, workspacePath, projects, maxDepth, currentDepth + 1);
        }
    }
    catch {
        // Directory not readable, skip
    }
}
|
|
1907
|
+
/**
 * Discover all NX projects in workspace.
 * Supports both workspace.json (older format) and project.json (newer format).
 * When workspace.json declares projects, it is authoritative and the
 * directory scan is skipped entirely.
 *
 * @param workspacePath - Workspace root path
 * @returns Map of project name to configuration
 */
function discoverNxProjects(workspacePath) {
    const projects = createMap();
    // Check for workspace.json (older NX format)
    const workspaceJson = readJsonFileIfExists(join$1(workspacePath, 'workspace.json'));
    if (workspaceJson?.projects) {
        for (const [name, config] of entries(workspaceJson.projects)) {
            if (typeof config === 'string') {
                // Path reference to project directory
                const projectPath = join$1(workspacePath, config);
                const projectConfig = getProjectConfig(projectPath, workspacePath);
                if (projectConfig) {
                    projects.set(name, { ...projectConfig, name });
                }
            }
            else if (typeof config === 'object' && config !== null) {
                // Inline config
                projects.set(name, { name, ...config });
            }
        }
        return projects;
    }
    // Scan for project.json files (newer NX format)
    const workspaceInfo = getNxWorkspaceInfo(workspacePath);
    const appsDir = workspaceInfo?.workspaceLayout.appsDir ?? 'apps';
    const libsDir = workspaceInfo?.workspaceLayout.libsDir ?? 'libs';
    const searchDirs = [appsDir, libsDir];
    // Also check packages directory (common in some setups)
    if (exists(join$1(workspacePath, 'packages'))) {
        searchDirs.push('packages');
    }
    for (const dir of searchDirs) {
        const dirPath = join$1(workspacePath, dir);
        if (exists(dirPath) && isDirectory(dirPath)) {
            try {
                // Scan at most 3 directory levels below each search dir.
                scanForProjects(dirPath, workspacePath, projects, 3);
            }
            catch {
                // Directory not accessible
            }
        }
    }
    // Also check root-level projects (standalone projects in monorepo root)
    if (isNxProject(workspacePath)) {
        const config = readProjectJson(workspacePath);
        if (config) {
            // Name falls back to the workspace directory's basename.
            const name = config.name || basename(workspacePath);
            projects.set(name, {
                ...config,
                name,
                root: '.',
            });
        }
    }
    return projects;
}
|
|
953
1969
|
|
|
954
1970
|
createScopedLogger('project-scope:config');
|
|
955
1971
|
/**
|
|
@@ -1454,7 +2470,7 @@ function commitChanges(tree, options) {
|
|
|
1454
2470
|
return order[a.type] - order[b.type];
|
|
1455
2471
|
});
|
|
1456
2472
|
for (const change of sortedChanges) {
|
|
1457
|
-
const absPath = join(tree.root, change.path);
|
|
2473
|
+
const absPath = join$1(tree.root, change.path);
|
|
1458
2474
|
try {
|
|
1459
2475
|
switch (change.type) {
|
|
1460
2476
|
case 'CREATE':
|
|
@@ -1821,6 +2837,45 @@ function commitExists(hash, options = {}) {
|
|
|
1821
2837
|
return false;
|
|
1822
2838
|
}
|
|
1823
2839
|
}
|
|
2840
|
+
/**
 * Checks if a commit is reachable from HEAD (i.e., is an ancestor of HEAD).
 *
 * A commit may exist in the repository but be orphaned (not in current branch history).
 * This function verifies that the commit is actually in the history of the current HEAD.
 *
 * Common use cases:
 * - Verify an external commit reference before using it for range queries
 * - Detect if history was rewritten (rebase/force push) after a reference was recorded
 *
 * @param hash - Commit hash to check
 * @param options - Additional options
 * @returns True if the commit is an ancestor of HEAD
 *
 * @example
 * if (commitReachableFromHead(baseCommit)) {
 *   // Safe to use for commit range queries
 *   const commits = getCommitsSince(baseCommit)
 * } else {
 *   // Commit not in current history, need fallback strategy
 * }
 */
function commitReachableFromHead(hash, options = {}) {
    const safeHash = escapeGitRef(hash);
    const gitArgs = ['merge-base', '--is-ancestor', safeHash, 'HEAD'];
    try {
        // merge-base --is-ancestor exits 0 (no throw) when the commit is an ancestor of HEAD.
        execFileSync('git', gitArgs, {
            encoding: 'utf-8',
            cwd: options.cwd,
            timeout: options.timeout ?? 5000,
            stdio: ['pipe', 'pipe', 'pipe'],
        });
        return true;
    }
    catch {
        // Exit code 1 means "not an ancestor"; any other failure also maps to false.
        return false;
    }
}
|
|
1824
2879
|
/**
|
|
1825
2880
|
* Parses raw git log output into GitCommit objects.
|
|
1826
2881
|
*
|
|
@@ -3463,6 +4518,7 @@ function createGitClient(config = {}) {
|
|
|
3463
4518
|
getCommitsSince: (since, options) => getCommitsSince(since, { ...opts, ...options }),
|
|
3464
4519
|
getCommit: (hash) => getCommit(hash, opts),
|
|
3465
4520
|
commitExists: (hash) => commitExists(hash, opts),
|
|
4521
|
+
commitReachableFromHead: (hash) => commitReachableFromHead(hash, opts),
|
|
3466
4522
|
// Tag operations
|
|
3467
4523
|
getTags: (options) => getTags({ ...opts, ...options }),
|
|
3468
4524
|
getTag: (name) => getTag(name, opts),
|
|
@@ -3506,6 +4562,7 @@ function createGitClient(config = {}) {
|
|
|
3506
4562
|
fetch: (remote, options) => fetch(opts, remote, options),
|
|
3507
4563
|
pull: (remote, branch) => pull(opts, remote, branch),
|
|
3508
4564
|
push: (remote, branch, options) => push(opts, remote, branch, options),
|
|
4565
|
+
getRemoteUrl: (remoteName) => getRemoteUrl(opts, remoteName),
|
|
3509
4566
|
};
|
|
3510
4567
|
}
|
|
3511
4568
|
// ============================================================================
|
|
@@ -3676,6 +4733,29 @@ function push(options, remote = 'origin', branch, pushOptions) {
|
|
|
3676
4733
|
return false;
|
|
3677
4734
|
}
|
|
3678
4735
|
}
|
|
4736
|
+
/**
 * Gets the URL of a remote.
 *
 * @param options - Configuration object containing cwd and timeout
 * @param options.cwd - Working directory for the git command
 * @param options.timeout - Command timeout in milliseconds
 * @param remoteName - Name of the remote (defaults to 'origin')
 * @returns The remote URL, or null if not found
 */
function getRemoteUrl(options, remoteName = 'origin') {
    try {
        const raw = execFileSync('git', ['remote', 'get-url', remoteName], {
            encoding: 'utf-8',
            cwd: options.cwd,
            timeout: options.timeout,
            stdio: ['pipe', 'pipe', 'pipe'],
        }).trim();
        // An empty response is normalized to null, same as a failed lookup.
        return raw === '' ? null : raw;
    }
    catch {
        return null;
    }
}
|
|
3679
4759
|
|
|
3680
4760
|
/**
|
|
3681
4761
|
* Creates a new cache instance.
|
|
@@ -3881,6 +4961,7 @@ async function getVersionInfo(state, packageName, version) {
|
|
|
3881
4961
|
engines: data.engines,
|
|
3882
4962
|
nodeVersion: data._nodeVersion,
|
|
3883
4963
|
npmVersion: data._npmVersion,
|
|
4964
|
+
gitHead: data.gitHead,
|
|
3884
4965
|
};
|
|
3885
4966
|
state.cache.set(cacheKey, info);
|
|
3886
4967
|
return info;
|
|
@@ -3919,9 +5000,6 @@ async function listVersions(state, packageName) {
|
|
|
3919
5000
|
return [];
|
|
3920
5001
|
}
|
|
3921
5002
|
}
|
|
3922
|
-
// ============================================================================
|
|
3923
|
-
// Security helpers - character-by-character validation (no regex)
|
|
3924
|
-
// ============================================================================
|
|
3925
5003
|
/**
|
|
3926
5004
|
* Maximum allowed package name length (npm limit).
|
|
3927
5005
|
*/
|
|
@@ -4045,6 +5123,422 @@ function createRegistry(type = 'npm', config = {}) {
|
|
|
4045
5123
|
}
|
|
4046
5124
|
}
|
|
4047
5125
|
|
|
5126
|
+
/**
|
|
5127
|
+
* Project Model
|
|
5128
|
+
*
|
|
5129
|
+
* Represents a single project/package within a workspace.
|
|
5130
|
+
* Contains package.json data, paths, and dependency information.
|
|
5131
|
+
*/
|
|
5132
|
+
/**
 * Creates a new Project object.
 *
 * A project is publishable when it is not marked private and has both a
 * name and a version. Optional collection fields default to empty values.
 *
 * @param options - Project properties
 * @returns A new Project object
 */
function createProject(options) {
    const markedPrivate = options.packageJson['private'] === true;
    return {
        name: options.name,
        version: options.version,
        path: options.path,
        packageJsonPath: options.packageJsonPath,
        packageJson: options.packageJson,
        changelogPath: options.changelogPath ?? null,
        internalDependencies: options.internalDependencies ?? [],
        internalDependents: options.internalDependents ?? [],
        publishable: !markedPrivate && options.name !== undefined && options.version !== undefined,
        private: markedPrivate,
    };
}
|
|
5154
|
+
|
|
5155
|
+
/**
|
|
5156
|
+
* Workspace Model
|
|
5157
|
+
*
|
|
5158
|
+
* Represents a monorepo workspace with multiple projects.
|
|
5159
|
+
* Used for package discovery, dependency tracking, and coordinated versioning.
|
|
5160
|
+
*/
|
|
5161
|
+
/**
 * Default workspace discovery patterns.
 * One glob per conventional monorepo directory; each matches direct
 * children's package.json files only (single '*' segment).
 */
const DEFAULT_PATTERNS = [
    'libs/*/package.json',
    'apps/*/package.json',
    'packages/*/package.json',
    'tools/*/package.json',
    'plugins/*/package.json',
];
/**
 * Default exclusion patterns.
 * Keeps dependency trees, build output, coverage reports, and git metadata
 * out of workspace discovery.
 */
const DEFAULT_EXCLUDE = ['**/node_modules/**', '**/dist/**', '**/coverage/**', '**/.git/**'];
/**
 * Default workspace configuration.
 * Changelog inclusion and dependency tracking are enabled by default.
 */
const DEFAULT_WORKSPACE_CONFIG = {
    patterns: DEFAULT_PATTERNS,
    exclude: DEFAULT_EXCLUDE,
    includeChangelogs: true,
    trackDependencies: true,
};
|
|
5184
|
+
|
|
5185
|
+
/**
 * Dependency Graph
 *
 * Builds and analyzes dependency relationships between workspace projects.
 * Provides functions for traversing the dependency graph and determining
 * build/release order.
 */
/**
 * Finds internal dependencies in a package.json.
 * Scans dependencies, devDependencies, peerDependencies, and
 * optionalDependencies and returns the names that belong to the workspace.
 *
 * @param packageJson - Parsed package.json content
 * @param workspacePackageNames - Set of all package names in the workspace
 * @returns Array of internal dependency names
 *
 * @example
 * ```typescript
 * const internalDeps = findInternalDependencies(packageJson, allPackageNames)
 * // ['@scope/lib-a', '@scope/lib-b']
 * ```
 */
function findInternalDependencies(packageJson, workspacePackageNames) {
    // Merge every dependency section; only the key names matter here,
    // so duplicate entries across sections collapse naturally.
    const merged = {
        ...packageJson.dependencies,
        ...packageJson.devDependencies,
        ...packageJson.peerDependencies,
        ...packageJson.optionalDependencies,
    };
    // Spread tolerates `keys` returning either an array or any iterable.
    return [...keys(merged)].filter((candidate) => workspacePackageNames.has(candidate));
}
|
|
5221
|
+
|
|
5222
|
+
/**
 * Changelog Discovery
 *
 * Discovers CHANGELOG.md files within workspace projects.
 */
/**
 * Common changelog file names in priority order.
 *
 * findProjectChangelog checks these in sequence and returns the first match.
 */
const CHANGELOG_NAMES = ['CHANGELOG.md', 'Changelog.md', 'changelog.md', 'HISTORY.md', 'CHANGES.md'];
|
|
5231
|
+
/**
 * Finds changelog files for a list of packages.
 * Returns a map of project path to changelog absolute path; projects
 * without a changelog are simply absent from the map.
 *
 * @param workspaceRoot - Workspace root path (currently unused; kept for API stability)
 * @param packages - List of packages to find changelogs for
 * @returns Map of project path to changelog path
 */
function findChangelogs(workspaceRoot, packages) {
    const changelogByProject = createMap();
    for (const project of packages) {
        const located = findProjectChangelog(project.path);
        if (located) {
            changelogByProject.set(project.path, located);
        }
    }
    return changelogByProject;
}
|
|
5249
|
+
/**
 * Finds the changelog file for a single project.
 * Checks the CHANGELOG_NAMES candidates in priority order and returns
 * the first one that exists on disk.
 *
 * @param projectPath - Path to project directory
 * @returns Absolute path to changelog or null if not found
 *
 * @example
 * ```typescript
 * import { findProjectChangelog } from '@hyperfrontend/versioning'
 *
 * const changelogPath = findProjectChangelog('./libs/my-lib')
 * if (changelogPath) {
 *   console.log('Found changelog:', changelogPath)
 * }
 * ```
 */
function findProjectChangelog(projectPath) {
    for (const candidateName of CHANGELOG_NAMES) {
        const candidatePath = join$1(projectPath, candidateName);
        if (exists(candidatePath)) {
            return candidatePath;
        }
    }
    // No known changelog file name matched.
    return null;
}
|
|
5275
|
+
|
|
5276
|
+
/**
 * Discovers all packages within a workspace.
 * Finds package.json files, parses them, and optionally discovers
 * changelogs and internal dependencies.
 *
 * @param options - Discovery options
 * @returns Discovery result with all found packages
 * @throws {Error} If workspace root cannot be found
 *
 * @example
 * ```typescript
 * import { discoverPackages } from '@hyperfrontend/versioning'
 *
 * // Discover all packages in current workspace
 * const result = discoverPackages()
 *
 * // Discover with custom patterns
 * const result = discoverPackages({
 *   patterns: ['packages/*\/package.json'],
 *   includeChangelogs: true
 * })
 *
 * // Access discovered projects
 * for (const project of result.projects) {
 *   console.log(`${project.name}@${project.version}`)
 * }
 * ```
 */
function discoverPackages(options = {}) {
    // Locate the workspace root: explicit option wins over auto-detection.
    const root = options.workspaceRoot ?? findWorkspaceRoot(process.cwd());
    if (!root) {
        throw createError('Could not find workspace root. Ensure you are in a monorepo with nx.json, turbo.json, or workspaces field.');
    }
    // Merge caller options over defaults, field by field.
    const config = {
        patterns: options.patterns ?? DEFAULT_WORKSPACE_CONFIG.patterns,
        exclude: options.exclude ?? DEFAULT_WORKSPACE_CONFIG.exclude,
        includeChangelogs: options.includeChangelogs ?? DEFAULT_WORKSPACE_CONFIG.includeChangelogs,
        trackDependencies: options.trackDependencies ?? DEFAULT_WORKSPACE_CONFIG.trackDependencies,
    };
    // Locate and parse every matching package.json.
    const manifestPaths = findPackageJsonFiles(root, config);
    const rawPackages = parsePackageJsonFiles(root, manifestPaths);
    // All workspace package names — used to classify internal dependencies.
    const packageNames = createSet(rawPackages.map((pkg) => pkg.name));
    // Optionally attach discovered changelog paths.
    const changelogMap = config.includeChangelogs ? findChangelogs(root, rawPackages) : createMap();
    const withChangelogs = rawPackages.map((pkg) => ({
        ...pkg,
        changelogPath: changelogMap.get(pkg.path) ?? null,
    }));
    // Optionally compute internal dependency/dependent relationships.
    const projects = config.trackDependencies
        ? buildProjectsWithDependencies(withChangelogs, packageNames)
        : withChangelogs.map((pkg) => createProject(pkg));
    // Index projects by name for O(1) lookup.
    const projectMap = createMap();
    for (const project of projects) {
        projectMap.set(project.name, project);
    }
    return {
        projects,
        projectMap,
        packageNames,
        workspaceRoot: root,
        config,
    };
}
|
|
5347
|
+
/**
 * Finds all package.json files matching the configured patterns.
 * Delegates to findFiles with defensive copies of the configured globs
 * so the shared config arrays are never handed to the file walker directly.
 *
 * @param workspaceRoot - Root directory to search from
 * @param config - Workspace configuration
 * @returns Array of relative paths to package.json files
 */
function findPackageJsonFiles(workspaceRoot, config) {
    return findFiles(workspaceRoot, [...config.patterns], {
        ignorePatterns: [...config.exclude],
    });
}
|
|
5361
|
+
/**
 * Parses package.json files and extracts metadata.
 * Nameless or unreadable manifests are skipped — discovery is best-effort.
 *
 * @param workspaceRoot - Workspace root path
 * @param packageJsonPaths - Relative paths to package.json files
 * @returns Array of raw package info objects
 */
function parsePackageJsonFiles(workspaceRoot, packageJsonPaths) {
    const parsed = [];
    for (const relPath of packageJsonPaths) {
        const manifestPath = join$1(workspaceRoot, relPath);
        const projectDir = dirname(manifestPath);
        try {
            const manifest = readPackageJson(manifestPath);
            // A package without a name cannot participate in versioning.
            if (!manifest.name) {
                continue;
            }
            parsed.push({
                name: manifest.name,
                version: manifest.version ?? '0.0.0',
                path: projectDir,
                packageJsonPath: manifestPath,
                packageJson: manifest,
                changelogPath: null,
            });
        }
        catch {
            // Deliberate best-effort: invalid manifests are skipped silently.
            continue;
        }
    }
    return parsed;
}
|
|
5395
|
+
/**
 * Builds projects with internal dependency information.
 * Two passes: resolve each package's internal dependencies, then invert
 * the edges so each project also knows which packages depend on it.
 *
 * @param rawPackages - Raw package info objects
 * @param packageNames - Set of all package names
 * @returns Array of Project objects with dependencies populated
 */
function buildProjectsWithDependencies(rawPackages, packageNames) {
    // Pass 1: attach internal dependencies to every package.
    const enriched = rawPackages.map((pkg) => ({
        ...pkg,
        internalDependencies: findInternalDependencies(pkg.packageJson, packageNames),
    }));
    // Invert edges: dependency name -> names of packages that depend on it.
    const dependentsByName = createMap();
    for (const pkg of enriched) {
        for (const depName of pkg.internalDependencies) {
            const bucket = dependentsByName.get(depName) ?? [];
            bucket.push(pkg.name);
            dependentsByName.set(depName, bucket);
        }
    }
    // Pass 2: materialize Project objects with dependents attached.
    return enriched.map((pkg) => createProject({
        ...pkg,
        internalDependents: dependentsByName.get(pkg.name) ?? [],
    }));
}
|
|
5430
|
+
/**
 * Discovers a project by name within a workspace.
 * Runs a full package discovery and looks the project up in the result.
 *
 * @param projectName - Name of the project to find
 * @param options - Discovery options
 * @returns The project or null if not found
 */
function discoverProjectByName(projectName, options = {}) {
    const { projectMap } = discoverPackages(options);
    return projectMap.get(projectName) ?? null;
}
|
|
5441
|
+
|
|
5442
|
+
/**
 * Default scopes to exclude from changelogs.
 *
 * These represent repository-level or infrastructure changes
 * (releases, dependency bumps, workspace/CI/build chores)
 * that typically don't belong in individual project changelogs.
 */
const DEFAULT_EXCLUDE_SCOPES = ['release', 'deps', 'workspace', 'root', 'repo', 'ci', 'build'];
|
|
5449
|
+
|
|
5450
|
+
/**
 * Creates a matcher that checks if commit scope matches any of the given scopes.
 * Matching is case-insensitive; commits with no scope never match.
 *
 * @param scopes - Scopes to match against (case-insensitive)
 * @returns Matcher that returns true if scope matches
 *
 * @example
 * const matcher = scopeMatcher(['ci', 'build', 'tooling'])
 * matcher({ scope: 'CI', ... }) // true
 * matcher({ scope: 'feat', ... }) // false
 */
function scopeMatcher(scopes) {
    // Normalize once at construction time so each check is a set lookup.
    const wanted = createSet(scopes.map((scope) => scope.toLowerCase()));
    return (ctx) => (ctx.scope ? wanted.has(ctx.scope.toLowerCase()) : false);
}
|
|
5469
|
+
/**
 * Creates a matcher that checks if commit scope starts with any of the given prefixes.
 * Matching is case-insensitive; commits with no scope never match.
 *
 * @param prefixes - Scope prefixes to match (case-insensitive)
 * @returns Matcher that returns true if scope starts with any prefix
 *
 * @example
 * const matcher = scopePrefixMatcher(['tool-', 'infra-'])
 * matcher({ scope: 'tool-package', ... }) // true
 * matcher({ scope: 'lib-utils', ... }) // false
 */
function scopePrefixMatcher(prefixes) {
    // Normalize once at construction time.
    const loweredPrefixes = prefixes.map((prefix) => prefix.toLowerCase());
    return (ctx) => {
        if (!ctx.scope) {
            return false;
        }
        const loweredScope = ctx.scope.toLowerCase();
        return loweredPrefixes.some((prefix) => loweredScope.startsWith(prefix));
    };
}
|
|
5489
|
+
/**
 * Combines matchers with OR logic - returns true if ANY matcher matches.
 * Evaluation short-circuits on the first match.
 *
 * @param matchers - Matchers to combine
 * @returns Combined matcher
 *
 * @example
 * const combined = anyOf(
 *   scopeMatcher(['ci', 'build']),
 *   messageMatcher(['[infra]']),
 *   custom((ctx) => ctx.scope?.startsWith('tool-'))
 * )
 */
function anyOf(...matchers) {
    return (ctx) => {
        for (const matcher of matchers) {
            if (matcher(ctx)) {
                return true;
            }
        }
        return false;
    };
}
|
|
5505
|
+
/**
 * Matches common CI/CD scopes.
 *
 * Matches: ci, cd, build, pipeline, workflow, actions
 */
const CI_SCOPE_MATCHER = scopeMatcher(['ci', 'cd', 'build', 'pipeline', 'workflow', 'actions']);
/**
 * Matches common tooling/workspace scopes.
 *
 * Matches: tooling, workspace, monorepo, nx, root
 */
const TOOLING_SCOPE_MATCHER = scopeMatcher(['tooling', 'workspace', 'monorepo', 'nx', 'root']);
/**
 * Matches tool-prefixed scopes (e.g., tool-package, tool-scripts).
 */
const TOOL_PREFIX_MATCHER = scopePrefixMatcher(['tool-']);
/**
 * Combined matcher for common infrastructure patterns.
 *
 * Combines CI, tooling, and tool-prefix matchers.
 * NOTE(review): the result of this call is discarded — this looks like
 * bundler output where the exported binding was tree-shaken but the
 * (side-effect-free) call expression was retained. Confirm against the
 * unbundled source before removing.
 */
anyOf(CI_SCOPE_MATCHER, TOOLING_SCOPE_MATCHER, TOOL_PREFIX_MATCHER);
|
|
5527
|
+
|
|
5528
|
+
/**
 * Default scope filtering configuration.
 *
 * Uses DEFAULT_EXCLUDE_SCOPES from commits/classify to ensure consistency
 * between flow-level filtering and commit classification.
 */
const DEFAULT_SCOPE_FILTERING_CONFIG = {
    strategy: 'hybrid',
    includeScopes: [], // empty = no allow-list restriction
    excludeScopes: DEFAULT_EXCLUDE_SCOPES,
    trackDependencyChanges: false,
    infrastructure: undefined, // no infrastructure config by default
    infrastructureMatcher: undefined, // no custom matcher by default
};
|
|
4048
5542
|
/**
|
|
4049
5543
|
* Default flow configuration values.
|
|
4050
5544
|
*/
|
|
@@ -4065,50 +5559,90 @@ const DEFAULT_FLOW_CONFIG = {
|
|
|
4065
5559
|
allowPrerelease: false,
|
|
4066
5560
|
prereleaseId: 'alpha',
|
|
4067
5561
|
releaseAs: undefined,
|
|
5562
|
+
repository: undefined,
|
|
5563
|
+
scopeFiltering: DEFAULT_SCOPE_FILTERING_CONFIG,
|
|
4068
5564
|
};
|
|
4069
5565
|
|
|
4070
5566
|
/**
 * Discovers project root using multiple strategies.
 *
 * Resolution order:
 * 1. Explicit `projectRoot` option (from Nx executor)
 * 2. Nx project discovery via `discoverNxProjects` (if in Nx workspace)
 * 3. Workspace discovery via `discoverProjectByName`
 *
 * @param workspaceRoot - Workspace root path
 * @param projectName - Project name (e.g., 'lib-versioning')
 * @param providedRoot - Explicitly provided project root (optional)
 * @param logger - Logger instance
 * @returns Resolution result with path and source, or null if not found
 */
function discoverProjectRoot(workspaceRoot, projectName, providedRoot, logger) {
    // Strategy 1: an explicitly provided root (preferred — comes from the Nx executor).
    if (providedRoot) {
        // Absolutize relative roots against the workspace root.
        let resolvedRoot;
        if (providedRoot.startsWith(workspaceRoot)) {
            resolvedRoot = providedRoot;
        }
        else {
            resolvedRoot = `${workspaceRoot}/${providedRoot}`;
        }
        logger.debug(`Using provided project root: ${providedRoot}`);
        return { projectRoot: resolvedRoot, source: 'provided' };
    }
    // Strategy 2: Nx project graph (fast path when inside an Nx workspace).
    if (isNxWorkspace(workspaceRoot)) {
        logger.debug('Nx workspace detected, attempting Nx project discovery');
        try {
            const graph = discoverNxProjects(workspaceRoot);
            const entry = graph.get(projectName);
            if (entry?.root) {
                logger.debug(`Discovered project root via Nx: ${entry.root}`);
                return { projectRoot: `${workspaceRoot}/${entry.root}`, source: 'nx-discovery' };
            }
            logger.debug(`Project "${projectName}" not found in Nx project graph`);
        }
        catch (error) {
            // Nx discovery failures are non-fatal; fall through to strategy 3.
            logger.debug(`Nx project discovery failed: ${error}`);
        }
    }
    // Strategy 3: generic workspace discovery (works for any monorepo layout).
    logger.debug('Attempting workspace discovery via discoverProjectByName');
    try {
        const match = discoverProjectByName(projectName, { workspaceRoot });
        if (match) {
            logger.debug(`Discovered project root via workspace discovery: ${match.path}`);
            return { projectRoot: match.path, source: 'workspace-discovery' };
        }
        logger.debug(`Project "${projectName}" not found via workspace discovery`);
    }
    catch (error) {
        logger.debug(`Workspace discovery failed: ${error}`);
    }
    // Every strategy failed — surface a diagnostic and let the caller fail.
    logger.error(`Could not discover project "${projectName}" in workspace "${workspaceRoot}". ` +
        `Ensure the project exists and has a valid package.json, or pass projectRoot explicitly.`);
    return null;
}
|
|
4094
5622
|
/**
 * Resolves the package name from the project root.
 * Best-effort: any read/parse failure yields the sentinel 'unknown'
 * rather than throwing, with the cause logged at debug level.
 *
 * @param tree - Virtual file system tree
 * @param projectRoot - Project root path
 * @param logger - Logger instance for diagnostics
 * @returns Package name from package.json, or 'unknown' on failure
 */
function resolvePackageName(tree, projectRoot, logger) {
    const packageJsonPath = `${projectRoot}/package.json`;
    try {
        const raw = tree.read(packageJsonPath, 'utf-8');
        if (!raw) {
            logger.debug(`package.json is empty or not found at ${packageJsonPath}`);
            return 'unknown';
        }
        const manifest = parse(raw);
        if (!manifest.name) {
            // NOTE(review): an empty-string name logs here but is still
            // returned as-is by the ?? below — preserved original quirk.
            logger.debug(`package.json at ${packageJsonPath} has no name field`);
        }
        return manifest.name ?? 'unknown';
    }
    catch (error) {
        logger.debug(`Failed to read package.json at ${packageJsonPath}: ${error}`);
        return 'unknown';
    }
}
|
|
@@ -4185,9 +5719,37 @@ async function executeFlow(flow, projectName, workspaceRoot, options = {}) {
|
|
|
4185
5719
|
const tree = options.tree ?? createTree(workspaceRoot);
|
|
4186
5720
|
const registry = options.registry ?? createRegistry('npm');
|
|
4187
5721
|
const git = options.git ?? createGitClient({ ...DEFAULT_GIT_CLIENT_CONFIG, cwd: workspaceRoot });
|
|
4188
|
-
// Resolve
|
|
4189
|
-
const
|
|
4190
|
-
|
|
5722
|
+
// Resolve project root with smart discovery
|
|
5723
|
+
const resolution = discoverProjectRoot(workspaceRoot, projectName, options.projectRoot, flowLogger);
|
|
5724
|
+
// Fail early if project cannot be discovered
|
|
5725
|
+
if (!resolution) {
|
|
5726
|
+
return {
|
|
5727
|
+
status: 'failed',
|
|
5728
|
+
steps: [],
|
|
5729
|
+
state: {},
|
|
5730
|
+
duration: dateNow() - startTime,
|
|
5731
|
+
summary: `Project "${projectName}" not found in workspace`,
|
|
5732
|
+
};
|
|
5733
|
+
}
|
|
5734
|
+
const { projectRoot, source: projectRootSource } = resolution;
|
|
5735
|
+
// Early validation: ensure project root is valid
|
|
5736
|
+
const packageJsonPath = `${projectRoot}/package.json`;
|
|
5737
|
+
if (!tree.exists(packageJsonPath)) {
|
|
5738
|
+
const errorMsg = `Project root validation failed: ${packageJsonPath} does not exist. ` +
|
|
5739
|
+
`Resolved projectRoot="${projectRoot}" (source: ${projectRootSource}) from projectName="${projectName}".`;
|
|
5740
|
+
flowLogger.error(errorMsg);
|
|
5741
|
+
return {
|
|
5742
|
+
status: 'failed',
|
|
5743
|
+
steps: [],
|
|
5744
|
+
state: {},
|
|
5745
|
+
duration: dateNow() - startTime,
|
|
5746
|
+
summary: `Invalid project root: ${projectRoot}`,
|
|
5747
|
+
};
|
|
5748
|
+
}
|
|
5749
|
+
const packageName = resolvePackageName(tree, projectRoot, flowLogger);
|
|
5750
|
+
if (packageName === 'unknown') {
|
|
5751
|
+
flowLogger.warn(`Could not read package name from ${packageJsonPath}`);
|
|
5752
|
+
}
|
|
4191
5753
|
// Initialize context
|
|
4192
5754
|
const context = {
|
|
4193
5755
|
workspaceRoot,
|