@storm-software/git-tools 2.89.18 → 2.89.19
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
- package/README.md +1 -1
- package/bin/{chunk-OLYGBFVX.js → chunk-5ZJZTDLU.js} +41 -24
- package/bin/{chunk-DEGLIS62.cjs → chunk-WS6FE5KD.cjs} +41 -24
- package/bin/git.cjs +69 -69
- package/bin/git.js +1 -1
- package/bin/post-checkout.cjs +9 -9
- package/bin/post-checkout.js +1 -1
- package/bin/post-commit.cjs +9 -9
- package/bin/post-commit.js +1 -1
- package/bin/post-merge.cjs +9 -9
- package/bin/post-merge.js +1 -1
- package/bin/pre-commit.cjs +9 -9
- package/bin/pre-commit.js +1 -1
- package/bin/pre-install.cjs +9 -9
- package/bin/pre-install.js +1 -1
- package/bin/pre-push.cjs +13 -13
- package/bin/pre-push.js +1 -1
- package/bin/prepare.cjs +7 -7
- package/bin/prepare.js +1 -1
- package/bin/version-warning.cjs +5 -5
- package/bin/version-warning.js +1 -1
- package/package.json +1 -1
package/README.md
CHANGED
@@ -21,7 +21,7 @@ This package is part of the <b>⚡Storm-Ops</b> monorepo. The Storm-Ops packages
 
 <h3 align="center">💻 Visit <a href="https://stormsoftware.com" target="_blank">stormsoftware.com</a> to stay up to date with this developer</h3><br />
 
-[](https://prettier.io/) [](http://nx.dev/) [](https://nextjs.org/) [](http://commitizen.github.io/cz-cli/)  [](https://fumadocs.vercel.app/) 
 
 <!-- prettier-ignore-start -->
 <!-- markdownlint-disable -->

package/bin/{chunk-OLYGBFVX.js → chunk-5ZJZTDLU.js}
CHANGED
@@ -39634,7 +39634,7 @@ var WorkspaceDirectoryConfigSchema = z.object({
 build: z.string().trim().default("dist").describe("The directory used to store the workspace's distributable files after a build (relative to the workspace root)")
 }).describe("Various directories used by the workspace to store data, cache, and configuration files");
 var StormConfigSchema = z.object({
-$schema: z.string().trim().default("https://cdn.jsdelivr.net/npm/@storm-software/config/schemas/storm.schema.json").optional().nullish().describe("The URL to the JSON schema file that describes the Storm configuration file"),
+$schema: z.string().trim().default("https://cdn.jsdelivr.net/npm/@storm-software/config/schemas/storm-workspace.schema.json").optional().nullish().describe("The URL to the JSON schema file that describes the Storm configuration file"),
 extends: ExtendsSchema.optional(),
 name: z.string().trim().toLowerCase().optional().describe("The name of the service/package/scope using this configuration"),
 namespace: z.string().trim().toLowerCase().optional().describe("The namespace of the package"),

@@ -39676,6 +39676,7 @@ var StormConfigSchema = z.object({
 "trace",
 "all"
 ]).default("info").describe("The log level used to filter out lower priority log messages. If not provided, this is defaulted using the `environment` config value (if `environment` is set to `production` then `level` is `error`, else `level` is `debug`)."),
+skipConfigLogging: z.boolean().optional().describe("Should the logging of the current Storm Workspace configuration be skipped?"),
 registry: RegistryConfigSchema,
 configFile: z.string().trim().nullable().default(null).describe("The filepath of the Storm config. When this field is null, no config file was found in the current workspace."),
 colors: ColorConfigSchema.or(ColorConfigMapSchema).describe("Storm theme config values used for styling various package elements"),

@@ -39844,14 +39845,17 @@ import { existsSync } from "node:fs";
 import { join as join2 } from "node:path";
 var MAX_PATH_SEARCH_DEPTH = 30;
 var depth = 0;
-function findFolderUp(startPath, endFileNames) {
+function findFolderUp(startPath, endFileNames = [], endDirectoryNames = []) {
 const _startPath = startPath ?? process.cwd();
+if (endDirectoryNames.some((endDirName) => existsSync(join2(_startPath, endDirName)))) {
+return _startPath;
+}
 if (endFileNames.some((endFileName) => existsSync(join2(_startPath, endFileName)))) {
 return _startPath;
 }
 if (_startPath !== "/" && depth++ < MAX_PATH_SEARCH_DEPTH) {
 const parent = join2(_startPath, "..");
-return findFolderUp(parent, endFileNames);
+return findFolderUp(parent, endFileNames, endDirectoryNames);
 }
 return void 0;
 }

@@ -39859,17 +39863,17 @@ __name(findFolderUp, "findFolderUp");

 // ../config-tools/src/utilities/find-workspace-root.ts
 var rootFiles = [
-"storm.json",
-"storm.json",
-"storm.yaml",
-"storm.yml",
-"storm.js",
-"storm.ts",
-".storm.json",
-".storm.yaml",
-".storm.yml",
-".storm.js",
-".storm.ts",
+"storm-workspace.json",
+"storm-workspace.json",
+"storm-workspace.yaml",
+"storm-workspace.yml",
+"storm-workspace.js",
+"storm-workspace.ts",
+".storm-workspace.json",
+".storm-workspace.yaml",
+".storm-workspace.yml",
+".storm-workspace.js",
+".storm-workspace.ts",
 "lerna.json",
 "nx.json",
 "turbo.json",

@@ -39893,11 +39897,18 @@ var rootFiles = [
 "pnpm-lock.yml",
 "bun.lockb"
 ];
+var rootDirectories = [
+".storm-workspace",
+".nx",
+".github",
+".vscode",
+".verdaccio"
+];
 function findWorkspaceRootSafe(pathInsideMonorepo) {
 if (process.env.STORM_WORKSPACE_ROOT || process.env.NX_WORKSPACE_ROOT_PATH) {
 return correctPaths(process.env.STORM_WORKSPACE_ROOT ?? process.env.NX_WORKSPACE_ROOT_PATH);
 }
-return correctPaths(findFolderUp(pathInsideMonorepo ?? process.cwd(), rootFiles));
+return correctPaths(findFolderUp(pathInsideMonorepo ?? process.cwd(), rootFiles, rootDirectories));
 }
 __name(findWorkspaceRootSafe, "findWorkspaceRootSafe");
 function findWorkspaceRoot(pathInsideMonorepo) {

@@ -41572,10 +41583,10 @@ var getConfigFileByName = /* @__PURE__ */ __name(async (fileName, filePath, opti
 }, "getConfigFileByName");
 var getConfigFile = /* @__PURE__ */ __name(async (filePath, additionalFileNames = []) => {
 const workspacePath = filePath ? filePath : findWorkspaceRoot(filePath);
-const result = await getConfigFileByName("storm", workspacePath);
+const result = await getConfigFileByName("storm-workspace", workspacePath);
 let config = result.config;
 const configFile = result.configFile;
-if (config && configFile && Object.keys(config).length > 0) {
+if (config && configFile && Object.keys(config).length > 0 && !config.skipConfigLogging) {
 writeTrace(`Found Storm configuration file "${configFile.includes(`${workspacePath}/`) ? configFile.replace(`${workspacePath}/`, "") : configFile}" at "${workspacePath}"`, {
 logLevel: "all"
 });

@@ -41584,9 +41595,11 @@ var getConfigFile = /* @__PURE__ */ __name(async (filePath, additionalFileNames
 const results = await Promise.all(additionalFileNames.map((fileName) => getConfigFileByName(fileName, workspacePath)));
 for (const result2 of results) {
 if (result2?.config && result2?.configFile && Object.keys(result2.config).length > 0) {
-
-
-
+if (!config.skipConfigLogging && !result2.config.skipConfigLogging) {
+writeTrace(`Found alternative configuration file "${result2.configFile.includes(`${workspacePath}/`) ? result2.configFile.replace(`${workspacePath}/`, "") : result2.configFile}" at "${workspacePath}"`, {
+logLevel: "all"
+});
+}
 config = defu(result2.config ?? {}, config ?? {});
 }
 }

@@ -41660,7 +41673,8 @@ var getConfigEnv = /* @__PURE__ */ __name(() => {
 cyclone: process.env[`${prefix}REGISTRY_CYCLONE`] || void 0,
 container: process.env[`${prefix}REGISTRY_CONTAINER`] || void 0
 },
-logLevel: process.env[`${prefix}LOG_LEVEL`] !== null && process.env[`${prefix}LOG_LEVEL`] !== void 0 ? process.env[`${prefix}LOG_LEVEL`] && Number.isSafeInteger(Number.parseInt(process.env[`${prefix}LOG_LEVEL`])) ? getLogLevelLabel(Number.parseInt(process.env[`${prefix}LOG_LEVEL`])) : process.env[`${prefix}LOG_LEVEL`] : void 0
+logLevel: process.env[`${prefix}LOG_LEVEL`] !== null && process.env[`${prefix}LOG_LEVEL`] !== void 0 ? process.env[`${prefix}LOG_LEVEL`] && Number.isSafeInteger(Number.parseInt(process.env[`${prefix}LOG_LEVEL`])) ? getLogLevelLabel(Number.parseInt(process.env[`${prefix}LOG_LEVEL`])) : process.env[`${prefix}LOG_LEVEL`] : void 0,
+skipConfigLogging: process.env[`${prefix}SKIP_CONFIG_LOGGING`] !== void 0 ? Boolean(process.env[`${prefix}SKIP_CONFIG_LOGGING`]) : void 0
 };
 const themeNames = Object.keys(process.env).filter((envKey) => envKey.startsWith(`${prefix}COLOR_`) && COLOR_KEYS.every((colorKey) => !envKey.startsWith(`${prefix}COLOR_LIGHT_${colorKey}`) && !envKey.startsWith(`${prefix}COLOR_DARK_${colorKey}`)));
 config.colors = themeNames.length > 0 ? themeNames.reduce((ret, themeName) => {

@@ -41896,6 +41910,9 @@ var setConfigEnv = /* @__PURE__ */ __name((config) => {
 process.env.NX_VERBOSE_LOGGING = String(getLogLevel(config.logLevel) >= LogLevel.DEBUG ? true : false);
 process.env.RUST_BACKTRACE = getLogLevel(config.logLevel) >= LogLevel.DEBUG ? "full" : "none";
 }
+if (config.skipConfigLogging !== void 0) {
+process.env[`${prefix}SKIP_CONFIG_LOGGING`] = String(config.skipConfigLogging);
+}
 process.env[`${prefix}CONFIG`] = JSON.stringify(config);
 for (const key of Object.keys(config.extensions ?? {})) {
 config.extensions[key] && Object.keys(config.extensions[key]) && setExtensionEnv(key, config.extensions[key]);

@@ -42013,7 +42030,7 @@ var createStormConfig = /* @__PURE__ */ __name(async (extensionName, schema, wor
 const defaultConfig = await getDefaultConfig(_workspaceRoot);
 const configFile = await getConfigFile(_workspaceRoot);
 if (!configFile && !skipLogs) {
-writeWarning("No Storm
+writeWarning("No Storm Workspace configuration file found in the current repository. Please ensure this is the expected behavior - you can add a `storm-workspace.json` file to the root of your workspace if it is not.\n", {
 logLevel: "all"
 });
 }

@@ -42051,8 +42068,8 @@ var createConfigExtension = /* @__PURE__ */ __name((extensionName, schema) => {
 var loadStormConfig = /* @__PURE__ */ __name(async (workspaceRoot, skipLogs = false) => {
 const config = await createStormConfig(void 0, void 0, workspaceRoot, skipLogs);
 setConfigEnv(config);
-if (!skipLogs) {
-writeTrace(`\u2699\uFE0F Using Storm configuration:
+if (!skipLogs && !config.skipConfigLogging) {
+writeTrace(`\u2699\uFE0F Using Storm Workspace configuration:
 ${formatLogMessage(config)}`, config);
 }
 return config;

package/bin/{chunk-DEGLIS62.cjs → chunk-WS6FE5KD.cjs}
CHANGED
@@ -39634,7 +39634,7 @@ var WorkspaceDirectoryConfigSchema = z.object({
 build: z.string().trim().default("dist").describe("The directory used to store the workspace's distributable files after a build (relative to the workspace root)")
 }).describe("Various directories used by the workspace to store data, cache, and configuration files");
 var StormConfigSchema = z.object({
-$schema: z.string().trim().default("https://cdn.jsdelivr.net/npm/@storm-software/config/schemas/storm.schema.json").optional().nullish().describe("The URL to the JSON schema file that describes the Storm configuration file"),
+$schema: z.string().trim().default("https://cdn.jsdelivr.net/npm/@storm-software/config/schemas/storm-workspace.schema.json").optional().nullish().describe("The URL to the JSON schema file that describes the Storm configuration file"),
 extends: ExtendsSchema.optional(),
 name: z.string().trim().toLowerCase().optional().describe("The name of the service/package/scope using this configuration"),
 namespace: z.string().trim().toLowerCase().optional().describe("The namespace of the package"),

@@ -39676,6 +39676,7 @@ var StormConfigSchema = z.object({
 "trace",
 "all"
 ]).default("info").describe("The log level used to filter out lower priority log messages. If not provided, this is defaulted using the `environment` config value (if `environment` is set to `production` then `level` is `error`, else `level` is `debug`)."),
+skipConfigLogging: z.boolean().optional().describe("Should the logging of the current Storm Workspace configuration be skipped?"),
 registry: RegistryConfigSchema,
 configFile: z.string().trim().nullable().default(null).describe("The filepath of the Storm config. When this field is null, no config file was found in the current workspace."),
 colors: ColorConfigSchema.or(ColorConfigMapSchema).describe("Storm theme config values used for styling various package elements"),

@@ -39844,14 +39845,17 @@ _chunkEM6PLOYYcjs.init_cjs_shims.call(void 0, );

 var MAX_PATH_SEARCH_DEPTH = 30;
 var depth = 0;
-function findFolderUp(startPath, endFileNames) {
+function findFolderUp(startPath, endFileNames = [], endDirectoryNames = []) {
 const _startPath = _nullishCoalesce(startPath, () => ( process.cwd()));
+if (endDirectoryNames.some((endDirName) => _fs3.existsSync.call(void 0, _path2.join.call(void 0, _startPath, endDirName)))) {
+return _startPath;
+}
 if (endFileNames.some((endFileName) => _fs3.existsSync.call(void 0, _path2.join.call(void 0, _startPath, endFileName)))) {
 return _startPath;
 }
 if (_startPath !== "/" && depth++ < MAX_PATH_SEARCH_DEPTH) {
 const parent = _path2.join.call(void 0, _startPath, "..");
-return findFolderUp(parent, endFileNames);
+return findFolderUp(parent, endFileNames, endDirectoryNames);
 }
 return void 0;
 }

@@ -39859,17 +39863,17 @@ _chunkEM6PLOYYcjs.__name.call(void 0, findFolderUp, "findFolderUp");

 // ../config-tools/src/utilities/find-workspace-root.ts
 var rootFiles = [
-"storm.json",
-"storm.json",
-"storm.yaml",
-"storm.yml",
-"storm.js",
-"storm.ts",
-".storm.json",
-".storm.yaml",
-".storm.yml",
-".storm.js",
-".storm.ts",
+"storm-workspace.json",
+"storm-workspace.json",
+"storm-workspace.yaml",
+"storm-workspace.yml",
+"storm-workspace.js",
+"storm-workspace.ts",
+".storm-workspace.json",
+".storm-workspace.yaml",
+".storm-workspace.yml",
+".storm-workspace.js",
+".storm-workspace.ts",
 "lerna.json",
 "nx.json",
 "turbo.json",

@@ -39893,11 +39897,18 @@ var rootFiles = [
 "pnpm-lock.yml",
 "bun.lockb"
 ];
+var rootDirectories = [
+".storm-workspace",
+".nx",
+".github",
+".vscode",
+".verdaccio"
+];
 function findWorkspaceRootSafe(pathInsideMonorepo) {
 if (process.env.STORM_WORKSPACE_ROOT || process.env.NX_WORKSPACE_ROOT_PATH) {
 return correctPaths(_nullishCoalesce(process.env.STORM_WORKSPACE_ROOT, () => ( process.env.NX_WORKSPACE_ROOT_PATH)));
 }
-return correctPaths(findFolderUp(_nullishCoalesce(pathInsideMonorepo, () => ( process.cwd())), rootFiles));
+return correctPaths(findFolderUp(_nullishCoalesce(pathInsideMonorepo, () => ( process.cwd())), rootFiles, rootDirectories));
 }
 _chunkEM6PLOYYcjs.__name.call(void 0, findWorkspaceRootSafe, "findWorkspaceRootSafe");
 function findWorkspaceRoot(pathInsideMonorepo) {

@@ -41575,10 +41586,10 @@ var getConfigFileByName = /* @__PURE__ */ _chunkEM6PLOYYcjs.__name.call(void 0,
 }, "getConfigFileByName");
 var getConfigFile = /* @__PURE__ */ _chunkEM6PLOYYcjs.__name.call(void 0, async (filePath, additionalFileNames = []) => {
 const workspacePath = filePath ? filePath : findWorkspaceRoot(filePath);
-const result = await getConfigFileByName("storm", workspacePath);
+const result = await getConfigFileByName("storm-workspace", workspacePath);
 let config = result.config;
 const configFile = result.configFile;
-if (config && configFile && Object.keys(config).length > 0) {
+if (config && configFile && Object.keys(config).length > 0 && !config.skipConfigLogging) {
 writeTrace(`Found Storm configuration file "${configFile.includes(`${workspacePath}/`) ? configFile.replace(`${workspacePath}/`, "") : configFile}" at "${workspacePath}"`, {
 logLevel: "all"
 });

@@ -41587,9 +41598,11 @@ var getConfigFile = /* @__PURE__ */ _chunkEM6PLOYYcjs.__name.call(void 0, async
 const results = await Promise.all(additionalFileNames.map((fileName) => getConfigFileByName(fileName, workspacePath)));
 for (const result2 of results) {
 if (_optionalChain([result2, 'optionalAccess', _130 => _130.config]) && _optionalChain([result2, 'optionalAccess', _131 => _131.configFile]) && Object.keys(result2.config).length > 0) {
-
-
-
+if (!config.skipConfigLogging && !result2.config.skipConfigLogging) {
+writeTrace(`Found alternative configuration file "${result2.configFile.includes(`${workspacePath}/`) ? result2.configFile.replace(`${workspacePath}/`, "") : result2.configFile}" at "${workspacePath}"`, {
+logLevel: "all"
+});
+}
 config = _chunkJMRHG3KScjs.defu.call(void 0, _nullishCoalesce(result2.config, () => ( {})), _nullishCoalesce(config, () => ( {})));
 }
 }

@@ -41663,7 +41676,8 @@ var getConfigEnv = /* @__PURE__ */ _chunkEM6PLOYYcjs.__name.call(void 0, () => {
 cyclone: process.env[`${prefix}REGISTRY_CYCLONE`] || void 0,
 container: process.env[`${prefix}REGISTRY_CONTAINER`] || void 0
 },
-logLevel: process.env[`${prefix}LOG_LEVEL`] !== null && process.env[`${prefix}LOG_LEVEL`] !== void 0 ? process.env[`${prefix}LOG_LEVEL`] && Number.isSafeInteger(Number.parseInt(process.env[`${prefix}LOG_LEVEL`])) ? getLogLevelLabel(Number.parseInt(process.env[`${prefix}LOG_LEVEL`])) : process.env[`${prefix}LOG_LEVEL`] : void 0
+logLevel: process.env[`${prefix}LOG_LEVEL`] !== null && process.env[`${prefix}LOG_LEVEL`] !== void 0 ? process.env[`${prefix}LOG_LEVEL`] && Number.isSafeInteger(Number.parseInt(process.env[`${prefix}LOG_LEVEL`])) ? getLogLevelLabel(Number.parseInt(process.env[`${prefix}LOG_LEVEL`])) : process.env[`${prefix}LOG_LEVEL`] : void 0,
+skipConfigLogging: process.env[`${prefix}SKIP_CONFIG_LOGGING`] !== void 0 ? Boolean(process.env[`${prefix}SKIP_CONFIG_LOGGING`]) : void 0
 };
 const themeNames = Object.keys(process.env).filter((envKey) => envKey.startsWith(`${prefix}COLOR_`) && COLOR_KEYS.every((colorKey) => !envKey.startsWith(`${prefix}COLOR_LIGHT_${colorKey}`) && !envKey.startsWith(`${prefix}COLOR_DARK_${colorKey}`)));
 config.colors = themeNames.length > 0 ? themeNames.reduce((ret, themeName) => {

@@ -41899,6 +41913,9 @@ var setConfigEnv = /* @__PURE__ */ _chunkEM6PLOYYcjs.__name.call(void 0, (config
 process.env.NX_VERBOSE_LOGGING = String(getLogLevel(config.logLevel) >= LogLevel.DEBUG ? true : false);
 process.env.RUST_BACKTRACE = getLogLevel(config.logLevel) >= LogLevel.DEBUG ? "full" : "none";
 }
+if (config.skipConfigLogging !== void 0) {
+process.env[`${prefix}SKIP_CONFIG_LOGGING`] = String(config.skipConfigLogging);
+}
 process.env[`${prefix}CONFIG`] = JSON.stringify(config);
 for (const key of Object.keys(_nullishCoalesce(config.extensions, () => ( {})))) {
 config.extensions[key] && Object.keys(config.extensions[key]) && setExtensionEnv(key, config.extensions[key]);

@@ -42016,7 +42033,7 @@ var createStormConfig = /* @__PURE__ */ _chunkEM6PLOYYcjs.__name.call(void 0, as
 const defaultConfig = await getDefaultConfig(_workspaceRoot);
 const configFile = await getConfigFile(_workspaceRoot);
 if (!configFile && !skipLogs) {
-writeWarning("No Storm
+writeWarning("No Storm Workspace configuration file found in the current repository. Please ensure this is the expected behavior - you can add a `storm-workspace.json` file to the root of your workspace if it is not.\n", {
 logLevel: "all"
 });
 }

@@ -42054,8 +42071,8 @@ var createConfigExtension = /* @__PURE__ */ _chunkEM6PLOYYcjs.__name.call(void 0
 var loadStormConfig = /* @__PURE__ */ _chunkEM6PLOYYcjs.__name.call(void 0, async (workspaceRoot, skipLogs = false) => {
 const config = await createStormConfig(void 0, void 0, workspaceRoot, skipLogs);
 setConfigEnv(config);
-if (!skipLogs) {
-writeTrace(`\u2699\uFE0F Using Storm configuration:
+if (!skipLogs && !config.skipConfigLogging) {
+writeTrace(`\u2699\uFE0F Using Storm Workspace configuration:
 ${formatLogMessage(config)}`, config);
 }
 return config;
package/bin/git.cjs
CHANGED
@@ -22,7 +22,7 @@ var _chunkUAKVQGZUcjs = require('./chunk-UAKVQGZU.cjs');



-var
+var _chunkWS6FE5KDcjs = require('./chunk-WS6FE5KD.cjs');


 var _chunkJMRHG3KScjs = require('./chunk-JMRHG3KS.cjs');

@@ -3265,7 +3265,7 @@ var require_wrap_ansi = _chunkEM6PLOYYcjs.__commonJS.call(void 0, {
 _chunkEM6PLOYYcjs.init_cjs_shims.call(void 0, );
 var stringWidth = require_string_width();
 var stripAnsi2 = require_strip_ansi();
-var ansiStyles2 =
+var ansiStyles2 = _chunkWS6FE5KDcjs.require_ansi_styles.call(void 0, );
 var ESCAPES2 = /* @__PURE__ */ new Set([
 "\x1B",
 "\x9B"

@@ -15607,7 +15607,7 @@ var require_node = _chunkEM6PLOYYcjs.__commonJS.call(void 0, {
 1
 ];
 try {
-const supportsColor2 =
+const supportsColor2 = _chunkWS6FE5KDcjs.require_supports_color.call(void 0, );
 if (supportsColor2 && (supportsColor2.stderr || supportsColor2).level >= 2) {
 exports.colors = [
 20,

@@ -50459,12 +50459,12 @@ var getRuleFromScopeEnum = /* @__PURE__ */ _chunkEM6PLOYYcjs.__name.call(void 0,
 // src/commitlint/run.ts
 var COMMIT_EDITMSG_PATH = ".git/COMMIT_EDITMSG";
 var runCommitLint = /* @__PURE__ */ _chunkEM6PLOYYcjs.__name.call(void 0, async (config, params) => {
-
+_chunkWS6FE5KDcjs.writeInfo.call(void 0, "\u{1F4DD} Validating git commit message aligns with the Storm Software specification", config);
 let commitMessage;
 if (params.message && params.message !== COMMIT_EDITMSG_PATH) {
 commitMessage = params.message;
 } else {
-const commitFile =
+const commitFile = _chunkWS6FE5KDcjs.joinPaths.call(void 0, config.workspaceRoot, params.file || params.message || COMMIT_EDITMSG_PATH);
 if (_fs.existsSync.call(void 0, commitFile)) {
 commitMessage = await _asyncOptionalChain([(await _promises.readFile.call(void 0, commitFile, "utf8")), 'optionalAccess', async _80 => _80.trim, 'call', async _81 => _81()]);
 }

@@ -50476,19 +50476,19 @@ var runCommitLint = /* @__PURE__ */ _chunkEM6PLOYYcjs.__name.call(void 0, async
 if (upstreamRemote) {
 const upstreamRemoteIdentifier = _optionalChain([upstreamRemote, 'access', _82 => _82.split, 'call', _83 => _83(" "), 'access', _84 => _84[0], 'optionalAccess', _85 => _85.trim, 'call', _86 => _86()]);
 if (!upstreamRemoteIdentifier) {
-
+_chunkWS6FE5KDcjs.writeWarning.call(void 0, `No upstream remote found for ${config.name}.git. Skipping comparison.`, config);
 return;
 }
-
+_chunkWS6FE5KDcjs.writeDebug.call(void 0, `Comparing against remote ${upstreamRemoteIdentifier}`);
 const currentBranch = _child_process2.default.execSync("git branch --show-current").toString().trim();
 gitLogCmd = gitLogCmd + ` ${currentBranch} ^${upstreamRemoteIdentifier}/main`;
 } else {
-
+_chunkWS6FE5KDcjs.writeWarning.call(void 0, `No upstream remote found for ${config.name}.git. Skipping comparison against upstream main.`, config);
 return;
 }
 commitMessage = _child_process2.default.execSync(gitLogCmd).toString().trim();
 if (!commitMessage) {
-
+_chunkWS6FE5KDcjs.writeWarning.call(void 0, "No commits found. Skipping commit message validation.", config);
 return;
 }
 }

@@ -50506,7 +50506,7 @@ var runCommitLint = /* @__PURE__ */ _chunkEM6PLOYYcjs.__name.call(void 0, async
 helpUrl: commitlintConfig.helpUrl
 });
 if (!matchCommit || report.errors.length || report.warnings.length) {
-
+_chunkWS6FE5KDcjs.writeSuccess.call(void 0, `Commit was processing completed successfully!`, config);
 } else {
 let errorMessage = " Oh no! Your commit message: \n-------------------------------------------------------------------\n" + commitMessage + "\n-------------------------------------------------------------------\n\n Does not follow the commit message convention specified by Storm Software.";
 errorMessage += "\ntype(scope): subject \n BLANK LINE \n body";

@@ -51700,7 +51700,7 @@ var createState = /* @__PURE__ */ _chunkEM6PLOYYcjs.__name.call(void 0, async (c
 answers: {}
 };
 } else {
-
+_chunkWS6FE5KDcjs.writeInfo.call(void 0, `Using custom commit config file: ${commitizenFile}`, config);
 let commitizenConfig = await Promise.resolve().then(() => _interopRequireWildcard(require(commitizenFile)));
 if (_optionalChain([commitizenConfig, 'optionalAccess', _92 => _92.default])) {
 commitizenConfig = _optionalChain([commitizenConfig, 'optionalAccess', _93 => _93.default]);

@@ -51741,7 +51741,7 @@ var createState = /* @__PURE__ */ _chunkEM6PLOYYcjs.__name.call(void 0, async (c
 }
 if (project) {
 let description = `${project.name} - ${project.root}`;
-const packageJsonPath =
+const packageJsonPath = _chunkWS6FE5KDcjs.joinPaths.call(void 0, project.root, "package.json");
 if (await hfs.isFile(packageJsonPath)) {
 const packageJson = await hfs.json(packageJsonPath);
 description = packageJson.description || description;

@@ -51815,10 +51815,10 @@ ${closedIssueEmoji}${config.prompt.settings.closedIssueMessage}${issues}`;

 // src/commit/run.ts
 var runCommit = /* @__PURE__ */ _chunkEM6PLOYYcjs.__name.call(void 0, async (commitizenFile = "@storm-software/git-tools/commit/config", dryRun = false) => {
-const config = await
+const config = await _chunkWS6FE5KDcjs.getConfig.call(void 0, );
 const state = await createState(config, commitizenFile);
 if (dryRun) {
-
+_chunkWS6FE5KDcjs.writeInfo.call(void 0, "Running in dry mode.", config);
 }
 console.log(chalk_template_default`
 {bold.#999999 ----------------------------------------}

@@ -51828,7 +51828,7 @@ var runCommit = /* @__PURE__ */ _chunkEM6PLOYYcjs.__name.call(void 0, async (com
 `);
 state.answers = await askQuestions(state);
 const message2 = formatCommitMessage(state);
-const commitMsgFile =
+const commitMsgFile = _chunkWS6FE5KDcjs.joinPaths.call(void 0, getGitDir(), "COMMIT_EDITMSG");
 console.log(chalk_template_default`
 {bold.#999999 ----------------------------------------}


@@ -51852,8 +51852,8 @@ var runCommit = /* @__PURE__ */ _chunkEM6PLOYYcjs.__name.call(void 0, async (com
 ]);
 const command = (0, import_any_shell_escape.default)(commandItems);
 if (dryRun) {
-
-
+_chunkWS6FE5KDcjs.writeDebug.call(void 0, `Skipping execution [dry-run]: ${command.replace(commitMsgFile, ".git/COMMIT_EDITMSG")}`, config);
+_chunkWS6FE5KDcjs.writeDebug.call(void 0, `Message [dry-run]: ${message2}`, config);
 } else {
 await _promises2.default.writeFile(commitMsgFile, message2);
 _chunkUAKVQGZUcjs.run.call(void 0, config, command);

@@ -64320,13 +64320,13 @@ var _filemaputilsjs = require('nx/src/project-graph/file-map-utils.js');

 var _paramsjs = require('nx/src/utils/params.js');
 async function releaseVersion(config, args) {
-
+_chunkWS6FE5KDcjs.writeInfo.call(void 0, "Running release version command", config);
 const projectGraph = await _projectgraphjs.createProjectGraphAsync.call(void 0, {
 exitOnError: true
 });
 const { projects } = _projectgraphjs.readProjectsConfigurationFromProjectGraph.call(void 0, projectGraph);
 const nxJson = _nxjsonjs.readNxJson.call(void 0, );
-const workspaceRoot2 = _nullishCoalesce(_optionalChain([config, 'optionalAccess', _177 => _177.workspaceRoot]), () => (
+const workspaceRoot2 = _nullishCoalesce(_optionalChain([config, 'optionalAccess', _177 => _177.workspaceRoot]), () => ( _chunkWS6FE5KDcjs.findWorkspaceRoot.call(void 0, )));
 if (args.verbose) {
 process.env.NX_VERBOSE_LOGGING = "true";
 }

@@ -64341,10 +64341,10 @@ async function releaseVersion(config, args) {
 ]);
 throw new Error(`The "release.git" property in nx.json may not be used with the "nx release version" subcommand or programmatic API. Instead, configure git options for subcommands directly with "release.version.git" and "release.changelog.git".`);
 }
-
+_chunkWS6FE5KDcjs.writeInfo.call(void 0, "Filtering projects and release groups", config);
 const { error: filterError, releaseGroups, releaseGroupToFilteredProjects } = _filterreleasegroupsjs.filterReleaseGroups.call(void 0, projectGraph, nxReleaseConfig, args.projects, args.groups);
 if (filterError) {
-
+_chunkWS6FE5KDcjs.writeError.call(void 0, filterError.title, config);
 throw new Error(filterError.title);
 }
 const tree = new (0, _treejs.FsTree)(workspaceRoot2, true);

@@ -64353,10 +64353,10 @@ async function releaseVersion(config, args) {
 const additionalChangedFiles = /* @__PURE__ */ new Set();
 const generatorCallbacks = [];
 if (_optionalChain([args, 'access', _183 => _183.projects, 'optionalAccess', _184 => _184.length])) {
-
+_chunkWS6FE5KDcjs.writeInfo.call(void 0, "Run versioning for all remaining release groups and filtered projects within them", config);
 for (const releaseGroup of releaseGroups) {
 const releaseGroupName = releaseGroup.name;
-
+_chunkWS6FE5KDcjs.writeInfo.call(void 0, `Running versioning for release group "${releaseGroupName}" and filtered projects within it`, config);
 const releaseGroupProjectNames = Array.from(_nullishCoalesce(releaseGroupToFilteredProjects.get(releaseGroup), () => ( [])));
 const projectBatches = _batchprojectsbygeneratorconfigjs.batchProjectsByGeneratorConfig.call(void 0,
 projectGraph,

@@ -64365,7 +64365,7 @@ async function releaseVersion(config, args) {
 releaseGroupProjectNames
 );
 for (const [generatorConfigString, projectNames] of projectBatches.entries()) {
-
+_chunkWS6FE5KDcjs.writeInfo.call(void 0, `Running versioning for batch "${JSON.stringify(projectNames)}" for release-group "${releaseGroupName}"`, config);
 const [generatorName, generatorOptions] = JSON.parse(generatorConfigString);
 const generatorData = resolveGeneratorData({
 ...extractGeneratorCollectionAndName(`batch "${JSON.stringify(projectNames)}" for release-group "${releaseGroupName}"`, generatorName),

@@ -64418,7 +64418,7 @@ async function releaseVersion(config, args) {
 gitCommitArgs: args.gitCommitArgs || _optionalChain([nxReleaseConfig, 'optionalAccess', _191 => _191.version, 'access', _192 => _192.git, 'access', _193 => _193.commitArgs])
 });
 } else if (_nullishCoalesce(args.stageChanges, () => ( _optionalChain([nxReleaseConfig, 'optionalAccess', _194 => _194.version, 'access', _195 => _195.git, 'access', _196 => _196.stageChanges])))) {
-
+_chunkWS6FE5KDcjs.writeInfo.call(void 0, "Staging changed files with git", config);
 await _gitjs.gitAdd.call(void 0, {
 changedFiles: changedFiles2,
 dryRun: args.dryRun,

@@ -64426,7 +64426,7 @@ async function releaseVersion(config, args) {
 });
 }
 if (_nullishCoalesce(args.gitTag, () => ( _optionalChain([nxReleaseConfig, 'optionalAccess', _197 => _197.version, 'access', _198 => _198.git, 'access', _199 => _199.tag])))) {
-
+_chunkWS6FE5KDcjs.writeInfo.call(void 0, "Tagging commit with git", config);
 for (const tag of gitTagValues2) {
 await _gitjs.gitTag.call(void 0, {
 tag,

@@ -64513,7 +64513,7 @@ async function releaseVersion(config, args) {
 gitCommitArgs: args.gitCommitArgs || _optionalChain([nxReleaseConfig, 'optionalAccess', _215 => _215.version, 'access', _216 => _216.git, 'access', _217 => _217.commitArgs])
 });
 } else if (_nullishCoalesce(args.stageChanges, () => ( _optionalChain([nxReleaseConfig, 'optionalAccess', _218 => _218.version, 'access', _219 => _219.git, 'access', _220 => _220.stageChanges])))) {
-
+_chunkWS6FE5KDcjs.writeInfo.call(void 0, "Staging changed files with git", config);
 await _gitjs.gitAdd.call(void 0, {
 changedFiles,
 dryRun: args.dryRun,

@@ -64521,7 +64521,7 @@ async function releaseVersion(config, args) {
 });
 }
 if (_nullishCoalesce(args.gitTag, () => ( _optionalChain([nxReleaseConfig, 'optionalAccess', _221 => _221.version, 'access', _222 => _222.git, 'access', _223 => _223.tag])))) {
-
+_chunkWS6FE5KDcjs.writeInfo.call(void 0, "Tagging commit with git", config);
 for (const tag of gitTagValues) {
 await _gitjs.gitTag.call(void 0, {
 tag,

@@ -64539,7 +64539,7 @@ async function releaseVersion(config, args) {
 }
 _chunkEM6PLOYYcjs.__name.call(void 0, releaseVersion, "releaseVersion");
 async function runVersionOnProjects(config, projectGraph, nxJson, args, tree, generatorData, projectNames, releaseGroup, versionData) {
-const workspaceRoot2 = _nullishCoalesce(_optionalChain([config, 'optionalAccess', _230 => _230.workspaceRoot]), () => (
+const workspaceRoot2 = _nullishCoalesce(_optionalChain([config, 'optionalAccess', _230 => _230.workspaceRoot]), () => ( _chunkWS6FE5KDcjs.findWorkspaceRoot.call(void 0, )));
 const generatorOptions = {
 // Always ensure a string to avoid generator schema validation errors
 specifier: _nullishCoalesce(args.specifier, () => ( "")),

@@ -64558,7 +64558,7 @@ async function runVersionOnProjects(config, projectGraph, nxJson, args, tree, ge
 throw new Error(`The version generator ${generatorData.collectionName}:${generatorData.normalizedGeneratorName} returned a function instead of an expected ReleaseVersionGeneratorResult`);
 }
 appendVersionData(versionData, versionResult.data);
-
+_chunkWS6FE5KDcjs.writeDebug.call(void 0, `Updated version data: ${JSON.stringify(versionData, null, 2)}`, config);
 return versionResult.callback;
 }
 _chunkEM6PLOYYcjs.__name.call(void 0, runVersionOnProjects, "runVersionOnProjects");

@@ -64566,10 +64566,10 @@ function printAndFlushChanges(config, tree, isDryRun) {
 const changes = tree.listChanges();
 for (const f of changes) {
 if (f.type === "CREATE") {
-
+_chunkWS6FE5KDcjs.writeInfo.call(void 0, `CREATE ${f.path}${isDryRun ? " [dry-run]" : ""}`, config);
 _printchangesjs.printDiff.call(void 0, "", _optionalChain([f, 'access', _231 => _231.content, 'optionalAccess', _232 => _232.toString, 'call', _233 => _233()]) || "");
 } else if (f.type === "UPDATE") {
-
+_chunkWS6FE5KDcjs.writeInfo.call(void 0, `UPDATE ${f.path}${isDryRun ? " [dry-run]" : ""}`, config);
 const currentContentsOnDisk = _fs.readFileSync.call(void 0, (0, import_devkit.joinPathFragments)(tree.root, f.path)).toString();
 _printchangesjs.printDiff.call(void 0, currentContentsOnDisk, _optionalChain([f, 'access', _234 => _234.content, 'optionalAccess', _235 => _235.toString, 'call', _236 => _236()]) || "");
 } else if (f.type === "DELETE") {

@@ -64642,12 +64642,12 @@ var runRelease = /* @__PURE__ */ _chunkEM6PLOYYcjs.__name.call(void 0, async (co
 process.env.NODE_AUTH_TOKEN = process.env.NPM_TOKEN;
 process.env.NPM_AUTH_TOKEN = process.env.NPM_TOKEN;
 process.env.NPM_CONFIG_PROVENANCE = "true";
-
+_chunkWS6FE5KDcjs.writeDebug.call(void 0, "Creating workspace Project Graph data...", config);
 const nxJson = _nxjsonjs.readNxJson.call(void 0, );
-
+_chunkWS6FE5KDcjs.writeDebug.call(void 0, "Reading in the workspaces release configuration", config);
 const to = options.head || process.env.NX_HEAD;
 const from = options.base || process.env.NX_BASE;
-
+_chunkWS6FE5KDcjs.writeDebug.call(void 0, `Using the following Git SHAs to determine the release content:
 - From: ${from}
 - To: ${to}
 `, config);

@@ -64659,14 +64659,14 @@ var runRelease = /* @__PURE__ */ _chunkEM6PLOYYcjs.__name.call(void 0, async (co
 }, {});
 }
 const nxReleaseConfig = _chunkJMRHG3KScjs.defu.call(void 0, nxJson.release, DEFAULT_RELEASE_CONFIG);
-
-
+_chunkWS6FE5KDcjs.writeInfo.call(void 0, "Using the following `nx.json` release configuration values", config);
+_chunkWS6FE5KDcjs.writeInfo.call(void 0, nxReleaseConfig, config);
 const releaseChangelog = _changelogjs.createAPI.call(void 0, nxReleaseConfig);
 const releasePublish = _publishjs.createAPI.call(void 0, nxReleaseConfig);
-
+_chunkWS6FE5KDcjs.writeDebug.call(void 0, "Determining the current release versions...", config);
 const { workspaceVersion, projectsVersionData } = await releaseVersion(config, {
 dryRun: false,
-verbose:
+verbose: _chunkWS6FE5KDcjs.isVerbose.call(void 0, config.logLevel),
 preid: config.preid,
 deleteVersionPlans: false,
 stageChanges: true,

@@ -64678,25 +64678,25 @@ var runRelease = /* @__PURE__ */ _chunkEM6PLOYYcjs.__name.call(void 0, async (co
 version: _optionalChain([nxReleaseConfig, 'optionalAccess', _239 => _239.projectsRelationship]) !== "fixed" ? void 0 : workspaceVersion,
 versionData: projectsVersionData,
 dryRun: false,
-verbose:
+verbose: _chunkWS6FE5KDcjs.isVerbose.call(void 0, config.logLevel),
 to,
 from,
 gitCommit: true,
 gitCommitMessage: "release(monorepo): Publish workspace release updates"
 });
-
+_chunkWS6FE5KDcjs.writeDebug.call(void 0, "Tagging commit with git", config);
 if (options.skipPublish) {
-
+_chunkWS6FE5KDcjs.writeWarning.call(void 0, "Skipping publishing packages since `skipPublish` was provided as `true` in the release options.", config);
 } else {
 const changedProjects = Object.keys(projectsVersionData).filter((key) => _optionalChain([projectsVersionData, 'access', _240 => _240[key], 'optionalAccess', _241 => _241.newVersion]));
 if (changedProjects.length > 0) {
-
+_chunkWS6FE5KDcjs.writeInfo.call(void 0, `Publishing release for ${changedProjects.length} ${changedProjects.length === 1 ? "project" : "projects"}:
 ${changedProjects.map((changedProject) => ` - ${changedProject}`).join("\n")}
 `, config);
 const result2 = await releasePublish({
 ...options,
 dryRun: !!options.dryRun,
-verbose:
+verbose: _chunkWS6FE5KDcjs.isVerbose.call(void 0, config.logLevel)
 });
 const failedProjects = Object.keys(result2).filter((key) => _optionalChain([result2, 'access', _242 => _242[key], 'optionalAccess', _243 => _243.code]) && _optionalChain([result2, 'access', _244 => _244[key], 'optionalAccess', _245 => _245.code]) > 0);
 if (failedProjects.length > 0) {

@@ -64707,13 +64707,13 @@ ${failedProjects.map((failedProject) => ` - ${failedProject} (Error Code: ${_op
 `);
 }
 } else {
-
+_chunkWS6FE5KDcjs.writeWarning.call(void 0, "Skipped publishing packages.", config);
 }
 }
-
+_chunkWS6FE5KDcjs.writeSuccess.call(void 0, "Completed the Storm workspace release process!", config);
 } catch (error) {
-
-error.message &&
+_chunkWS6FE5KDcjs.writeFatal.call(void 0, "An exception was thrown while running the Storm release workflow.", config);
+error.message && _chunkWS6FE5KDcjs.writeError.call(void 0, `${error.name ? `${error.name}: ` : ""}${error.message}${error.stack ? `
 ${error.stack}` : ""}`, config);
 throw error;
 }

@@ -64723,8 +64723,8 @@ ${error.stack}` : ""}`, config);
 var _config = {};
 function createProgram(config) {
 _config = config;
-
-const root2 =
+_chunkWS6FE5KDcjs.writeInfo.call(void 0, "\u26A1 Running Storm Git Tools", config);
+const root2 = _chunkWS6FE5KDcjs.findWorkspaceRootSafe.call(void 0, process.cwd());
 process.env.STORM_WORKSPACE_ROOT ??= root2;
 process.env.NX_WORKSPACE_ROOT_PATH ??= root2;
 root2 && process.chdir(root2);

@@ -64752,14 +64752,14 @@ function createProgram(config) {
 _chunkEM6PLOYYcjs.__name.call(void 0, createProgram, "createProgram");
 async function commitAction({ config = "@storm-software/git-tools/commit/config.js", dryRun = false }) {
 try {
-
+_chunkWS6FE5KDcjs.writeInfo.call(void 0, `\u26A1 Preparing to commit your changes. Please provide the requested details below...`, _config);
 await runCommit(config, dryRun);
-
+_chunkWS6FE5KDcjs.writeSuccess.call(void 0, `\u{1F389} Storm Commit processing completed successfully!

 Note: Please run "pnpm push" to upload these changes to the remote ${_config.name ? _config.name : _config.namespace ? _config.namespace : _config.organization ? _config.organization : "Storm-Software"} Git repository at ${_config.repository}
 `, _config);
 } catch (error) {
-
+_chunkWS6FE5KDcjs.writeFatal.call(void 0, `A fatal error occurred while running commit action:

 ${error.message}`, _config);
 throw new Error(error.message, {

@@ -64770,11 +64770,11 @@ ${error.message}`, _config);
 _chunkEM6PLOYYcjs.__name.call(void 0, commitAction, "commitAction");
 async function readmeAction(options) {
 try {
-
+_chunkWS6FE5KDcjs.writeInfo.call(void 0, "\u26A1 Formatting the workspace's README.md files", _config);
 await runReadme(options);
-
+_chunkWS6FE5KDcjs.writeSuccess.call(void 0, "Formatting of the workspace's README.md files is complete\n", _config);
 } catch (error) {
-
+_chunkWS6FE5KDcjs.writeFatal.call(void 0, `A fatal error occurred while running README format action:

 ${error.message}`);
 throw new Error(error.message, {

@@ -64785,19 +64785,19 @@ ${error.message}`);
 _chunkEM6PLOYYcjs.__name.call(void 0, readmeAction, "readmeAction");
 async function releaseAction({ project, base, head, dryRun }) {
 try {
-
+_chunkWS6FE5KDcjs.writeInfo.call(void 0, "\u26A1 Running the Storm Release and Publish process on the workspace", _config);
 await runRelease(_config, {
 dryRun,
 project,
 base,
 head
 });
-
+_chunkWS6FE5KDcjs.writeSuccess.call(void 0, "Release completed successfully!\n", _config);
 } catch (error) {
-
+_chunkWS6FE5KDcjs.writeFatal.call(void 0, `A fatal error occurred while running release action:

 ${error.message}`, _config);
-
+_chunkWS6FE5KDcjs.writeFatal.call(void 0, error, _config);
 console.error("");
 console.error("Fatal release error: ");
 console.error(error);

@@ -64810,15 +64810,15 @@ ${error.message}`, _config);
 _chunkEM6PLOYYcjs.__name.call(void 0, releaseAction, "releaseAction");
 async function commitLintAction({ config, message: message2, file }) {
 try {
-
+_chunkWS6FE5KDcjs.writeInfo.call(void 0, `\u26A1 Linting the ${_config.repository ? _config.repository : _config.namespace ? _config.namespace : _config.name ? _config.name : _config.organization ? _config.organization : "Storm-Software"} repository's commit messages.`, _config);
 await runCommitLint(_config, {
 config,
 message: message2,
 file
 });
-
+_chunkWS6FE5KDcjs.writeSuccess.call(void 0, "Linting the commit messages completed successfully!\n", _config);
 } catch (error) {
-
+_chunkWS6FE5KDcjs.writeFatal.call(void 0, `A fatal error occurred while linting the commit messages:

 ${error.message}`, _config);
 throw new Error(error.message, {

@@ -64830,18 +64830,18 @@ _chunkEM6PLOYYcjs.__name.call(void 0, commitLintAction, "commitLintAction");

 // bin/git.ts
 void (async () => {
-const config = await
+const config = await _chunkWS6FE5KDcjs.getConfig.call(void 0, );
 try {
-
+_chunkWS6FE5KDcjs.handleProcess.call(void 0, config);
 const program2 = createProgram(config);
 await program2.parseAsync(process.argv);
-
-
+_chunkWS6FE5KDcjs.writeSuccess.call(void 0, `\u{1F389} Git ${process.argv && process.argv.length >= 3 && process.argv[2] ? process.argv[2] : "tool"} processing completed successfully!`, config);
+_chunkWS6FE5KDcjs.exitWithSuccess.call(void 0, config);
 } catch (error) {
-
+_chunkWS6FE5KDcjs.writeFatal.call(void 0, `A fatal error occurred while running the Storm Git tool:
 ${_optionalChain([error, 'optionalAccess', _248 => _248.message]) ? error.message : JSON.stringify(error)}${_optionalChain([error, 'optionalAccess', _249 => _249.stack]) ? `
 Stack Trace: ${error.stack}` : ""}`, config);
-
+_chunkWS6FE5KDcjs.exitWithError.call(void 0, config);
 process.exit(1);
 }
 })();
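Across git.cjs and the hook binaries below, the renamed chunk mainly supplies the shared config, logging, and process helpers (getConfig, handleProcess, writeInfo, writeSuccess, writeFatal, exitWithSuccess, exitWithError) that every entry point wraps itself in. A simplified sketch of that wrapper pattern follows; the helper signatures are assumed from the call sites in this diff, not from a documented API.

```ts
// Illustrative only: helper names mirror the call sites above; their real
// implementations live in the renamed chunk (chunk-WS6FE5KD.cjs / chunk-5ZJZTDLU.js).
type StormConfig = { logLevel?: string; skipConfigLogging?: boolean };

declare function getConfig(): Promise<StormConfig>;
declare function handleProcess(config: StormConfig): void;
declare function writeSuccess(message: string, config: StormConfig): void;
declare function writeFatal(message: string, config: StormConfig): void;
declare function exitWithSuccess(config: StormConfig): void;
declare function exitWithError(config: StormConfig): void;

// Each bin entry point follows the same shape: load the workspace config,
// register process handlers, do the work, then exit through the shared
// success/error helpers.
async function runEntryPoint(work: (config: StormConfig) => Promise<void>) {
  const config = await getConfig();
  try {
    handleProcess(config);
    await work(config);
    writeSuccess("Processing completed successfully!", config);
    exitWithSuccess(config);
  } catch (error) {
    writeFatal(`A fatal error occurred while running the program: ${(error as Error).message}`, config);
    exitWithError(config);
    process.exit(1);
  }
}
```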
package/bin/git.js
CHANGED
package/bin/post-checkout.cjs
CHANGED
@@ -13,7 +13,7 @@ var _chunkUAKVQGZUcjs = require('./chunk-UAKVQGZU.cjs');



-var
+var _chunkWS6FE5KDcjs = require('./chunk-WS6FE5KD.cjs');
 require('./chunk-JMRHG3KS.cjs');
 require('./chunk-NHO7HSOE.cjs');
 require('./chunk-WOZSAFMN.cjs');

@@ -27,23 +27,23 @@ var _chunkEM6PLOYYcjs = require('./chunk-EM6PLOYY.cjs');
 // bin/post-checkout.ts
 _chunkEM6PLOYYcjs.init_cjs_shims.call(void 0, );
 void (async () => {
-const config = await
+const config = await _chunkWS6FE5KDcjs.getConfig.call(void 0, );
 try {
-
-
+_chunkWS6FE5KDcjs.handleProcess.call(void 0, config);
+_chunkWS6FE5KDcjs.writeInfo.call(void 0, "Running post-checkout hook...", config);
 _chunkBXSXKKIZcjs.checkPackageVersion.call(void 0, _optionalChain([process, 'access', _ => _.argv, 'optionalAccess', _2 => _2.slice, 'call', _3 => _3(1)]));
 try {
 _chunkUAKVQGZUcjs.run.call(void 0, config, "git-lfs version");
 } catch (error) {
-
+_chunkWS6FE5KDcjs.writeError.call(void 0, `This repository is configured for Git LFS but 'git-lfs' was not found on your path. If you no longer wish to use Git LFS, remove this hook by deleting .git/hooks/post-checkout.
 Error: ${_optionalChain([error, 'optionalAccess', _4 => _4.message])}`, config);
-
+_chunkWS6FE5KDcjs.exitWithError.call(void 0, config);
 }
 _chunkUAKVQGZUcjs.run.call(void 0, config, "git lfs post-checkout");
-
+_chunkWS6FE5KDcjs.exitWithSuccess.call(void 0, config);
 } catch (error) {
-
-
+_chunkWS6FE5KDcjs.writeFatal.call(void 0, `A fatal error occurred while running the program: ${error.message}`, config);
+_chunkWS6FE5KDcjs.exitWithError.call(void 0, config);
 process.exit(1);
 }
 })();
package/bin/post-checkout.js
CHANGED
package/bin/post-commit.cjs
CHANGED
@@ -13,7 +13,7 @@ var _chunkUAKVQGZUcjs = require('./chunk-UAKVQGZU.cjs');



-var
+var _chunkWS6FE5KDcjs = require('./chunk-WS6FE5KD.cjs');
 require('./chunk-JMRHG3KS.cjs');
 require('./chunk-NHO7HSOE.cjs');
 require('./chunk-WOZSAFMN.cjs');

@@ -27,23 +27,23 @@ var _chunkEM6PLOYYcjs = require('./chunk-EM6PLOYY.cjs');
 // bin/post-commit.ts
 _chunkEM6PLOYYcjs.init_cjs_shims.call(void 0, );
 void (async () => {
-const config = await
+const config = await _chunkWS6FE5KDcjs.getConfig.call(void 0, );
 try {
-
-
+_chunkWS6FE5KDcjs.handleProcess.call(void 0, config);
+_chunkWS6FE5KDcjs.writeInfo.call(void 0, "Running post-commit hook...", config);
 _chunkBXSXKKIZcjs.checkPackageVersion.call(void 0, _optionalChain([process, 'access', _ => _.argv, 'optionalAccess', _2 => _2.slice, 'call', _3 => _3(1)]));
 try {
 _chunkUAKVQGZUcjs.run.call(void 0, config, "git-lfs version");
 } catch (error) {
-
+_chunkWS6FE5KDcjs.writeError.call(void 0, `This repository is configured for Git LFS but 'git-lfs' was not found on your path. If you no longer wish to use Git LFS, remove this hook by deleting .git/hooks/post-commit.
 Error: ${_optionalChain([error, 'optionalAccess', _4 => _4.message])}`, config);
-
+_chunkWS6FE5KDcjs.exitWithError.call(void 0, config);
 }
 _chunkUAKVQGZUcjs.run.call(void 0, config, "git lfs post-commit");
-
+_chunkWS6FE5KDcjs.exitWithSuccess.call(void 0, config);
 } catch (error) {
-
-
+_chunkWS6FE5KDcjs.writeFatal.call(void 0, `A fatal error occurred while running the program: ${error.message}`, config);
+_chunkWS6FE5KDcjs.exitWithError.call(void 0, config);
 process.exit(1);
 }
 })();
package/bin/post-commit.js
CHANGED
package/bin/post-merge.cjs
CHANGED
@@ -13,7 +13,7 @@ var _chunkUAKVQGZUcjs = require('./chunk-UAKVQGZU.cjs');



-var
+var _chunkWS6FE5KDcjs = require('./chunk-WS6FE5KD.cjs');
 require('./chunk-JMRHG3KS.cjs');
 require('./chunk-NHO7HSOE.cjs');
 require('./chunk-WOZSAFMN.cjs');
@@ -27,23 +27,23 @@ var _chunkEM6PLOYYcjs = require('./chunk-EM6PLOYY.cjs');
 // bin/post-merge.ts
 _chunkEM6PLOYYcjs.init_cjs_shims.call(void 0, );
 void (async () => {
-const config = await
+const config = await _chunkWS6FE5KDcjs.getConfig.call(void 0, );
 try {
-
-
+_chunkWS6FE5KDcjs.handleProcess.call(void 0, config);
+_chunkWS6FE5KDcjs.writeInfo.call(void 0, "Running post-merge hook...", config);
 _chunkBXSXKKIZcjs.checkPackageVersion.call(void 0, _optionalChain([process, 'access', _ => _.argv, 'optionalAccess', _2 => _2.slice, 'call', _3 => _3(1)]));
 try {
 _chunkUAKVQGZUcjs.run.call(void 0, config, "git-lfs version");
 } catch (error) {
-
+_chunkWS6FE5KDcjs.writeError.call(void 0, `This repository is configured for Git LFS but 'git-lfs' was not found on your path. If you no longer wish to use Git LFS, remove this hook by deleting .git/hooks/post-merge.
 Error: ${_optionalChain([error, 'optionalAccess', _4 => _4.message])}`, config);
-
+_chunkWS6FE5KDcjs.exitWithError.call(void 0, config);
 }
 _chunkUAKVQGZUcjs.run.call(void 0, config, "git lfs post-merge");
-
+_chunkWS6FE5KDcjs.exitWithSuccess.call(void 0, config);
 } catch (error) {
-
-
+_chunkWS6FE5KDcjs.writeFatal.call(void 0, `A fatal error occurred while running the program: ${error.message}`, config);
+_chunkWS6FE5KDcjs.exitWithError.call(void 0, config);
 process.exit(1);
 }
 })();
package/bin/post-merge.js
CHANGED
package/bin/pre-commit.cjs
CHANGED
@@ -11,7 +11,7 @@ var _chunkBXSXKKIZcjs = require('./chunk-BXSXKKIZ.cjs');



-var
+var _chunkWS6FE5KDcjs = require('./chunk-WS6FE5KD.cjs');
 require('./chunk-JMRHG3KS.cjs');
 require('./chunk-NHO7HSOE.cjs');
 require('./chunk-WOZSAFMN.cjs');
@@ -25,19 +25,19 @@ var _chunkEM6PLOYYcjs = require('./chunk-EM6PLOYY.cjs');
 // bin/pre-commit.ts
 _chunkEM6PLOYYcjs.init_cjs_shims.call(void 0, );
 void (async () => {
-const config = await
+const config = await _chunkWS6FE5KDcjs.getConfig.call(void 0, );
 try {
-
-
+_chunkWS6FE5KDcjs.handleProcess.call(void 0, config);
+_chunkWS6FE5KDcjs.writeInfo.call(void 0, "Running pre-commit hook...", config);
 _chunkBXSXKKIZcjs.checkPackageVersion.call(void 0, process.argv.slice(1));
 if (_chunkBXSXKKIZcjs.isPackageVersionChanged.call(void 0, _optionalChain([process, 'access', _ => _.argv, 'optionalAccess', _2 => _2.slice, 'call', _3 => _3(1)]))) {
-
-
+_chunkWS6FE5KDcjs.writeError.call(void 0, "Please regenerate the package lock file before committing...", config);
+_chunkWS6FE5KDcjs.exitWithError.call(void 0, config);
 }
-
+_chunkWS6FE5KDcjs.exitWithSuccess.call(void 0, config);
 } catch (error) {
-
-
+_chunkWS6FE5KDcjs.writeFatal.call(void 0, `A fatal error occurred while running the program: ${error.message}`, config);
+_chunkWS6FE5KDcjs.exitWithError.call(void 0, config);
 process.exit(1);
 }
 })();
package/bin/pre-commit.js
CHANGED
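The pre-commit diff above gates commits on isPackageVersionChanged from a bundled chunk, whose exact criteria are not visible in this diff. As a hedged illustration only, one plausible way such a gate could be expressed with plain git commands is shown below; the staged-file heuristic here is an assumption, not the package's actual logic.

```ts
import { execSync } from "node:child_process";

// List the files staged for this commit.
const staged = execSync("git diff --cached --name-only", { encoding: "utf8" })
  .split("\n")
  .filter(Boolean);

// Hypothetical stand-in for isPackageVersionChanged: a package.json was staged
// without a matching pnpm-lock.yaml update.
const manifestChanged = staged.some((file) => file.endsWith("package.json"));
const lockChanged = staged.includes("pnpm-lock.yaml");

if (manifestChanged && !lockChanged) {
  console.error("Please regenerate the package lock file before committing...");
  process.exit(1);
}
process.exit(0);
```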
package/bin/pre-install.cjs
CHANGED
@@ -9,7 +9,7 @@ var _chunkUAKVQGZUcjs = require('./chunk-UAKVQGZU.cjs');



-var
+var _chunkWS6FE5KDcjs = require('./chunk-WS6FE5KD.cjs');
 require('./chunk-JMRHG3KS.cjs');
 require('./chunk-NHO7HSOE.cjs');
 require('./chunk-WOZSAFMN.cjs');
@@ -23,19 +23,19 @@ var _chunkEM6PLOYYcjs = require('./chunk-EM6PLOYY.cjs');
 // bin/pre-install.ts
 _chunkEM6PLOYYcjs.init_cjs_shims.call(void 0, );
 void (async () => {
-const config = await
+const config = await _chunkWS6FE5KDcjs.getConfig.call(void 0, );
 try {
-
-
+_chunkWS6FE5KDcjs.handleProcess.call(void 0, config);
+_chunkWS6FE5KDcjs.writeInfo.call(void 0, "Running pre-install hook...", config);
 if (Boolean(process.env.CI) || Boolean(process.env.STORM_CI)) {
-
-
+_chunkWS6FE5KDcjs.writeInfo.call(void 0, "Skipping pre-install for CI process...", config);
+_chunkWS6FE5KDcjs.exitWithSuccess.call(void 0, config);
 }
 _chunkUAKVQGZUcjs.run.call(void 0, config, "npx -y only-allow pnpm");
-
+_chunkWS6FE5KDcjs.exitWithSuccess.call(void 0, config);
 } catch (error) {
-
-
+_chunkWS6FE5KDcjs.writeFatal.call(void 0, `A fatal error occurred while running the program: ${error.message}`, config);
+_chunkWS6FE5KDcjs.exitWithError.call(void 0, config);
 process.exit(1);
 }
 })();
package/bin/pre-install.js
CHANGED
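The pre-install flow above is short enough to restate as a standalone sketch: skip the guard entirely under CI, otherwise enforce pnpm as the package manager via `only-allow`. The bundled chunk's logging and exit helpers are replaced with plain console/process calls here, so this approximates the behaviour rather than reproducing the source.

```ts
import { execSync } from "node:child_process";

try {
  console.info("Running pre-install hook...");
  // CI pipelines skip the package-manager guard entirely.
  if (Boolean(process.env.CI) || Boolean(process.env.STORM_CI)) {
    console.info("Skipping pre-install for CI process...");
    process.exit(0);
  }
  // Fail the install unless it was started with pnpm.
  execSync("npx -y only-allow pnpm", { stdio: "inherit" });
  process.exit(0);
} catch (error) {
  console.error(`A fatal error occurred while running the program: ${(error as Error).message}`);
  process.exit(1);
}
```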
package/bin/pre-push.cjs
CHANGED
@@ -14,7 +14,7 @@ var _chunkUAKVQGZUcjs = require('./chunk-UAKVQGZU.cjs');



-var
+var _chunkWS6FE5KDcjs = require('./chunk-WS6FE5KD.cjs');
 require('./chunk-JMRHG3KS.cjs');
 require('./chunk-NHO7HSOE.cjs');
 require('./chunk-WOZSAFMN.cjs');
@@ -31,12 +31,12 @@ var _fs = require('fs'); var _fs2 = _interopRequireDefault(_fs);
 var _promises = require('fs/promises');
 var _path = require('path'); var _path2 = _interopRequireDefault(_path);
 void (async () => {
-const config = await
+const config = await _chunkWS6FE5KDcjs.getConfig.call(void 0, );
 try {
-
-
+_chunkWS6FE5KDcjs.handleProcess.call(void 0, config);
+_chunkWS6FE5KDcjs.writeInfo.call(void 0, "Running pre-push hook...", config);
 _chunkBXSXKKIZcjs.checkPackageVersion.call(void 0, _optionalChain([process, 'access', _ => _.argv, 'optionalAccess', _2 => _2.slice, 'call', _3 => _3(1)]));
-
+_chunkWS6FE5KDcjs.writeInfo.call(void 0, "\u{1F512}\u{1F512}\u{1F512} Validating lock files \u{1F512}\u{1F512}\u{1F512}\n", config);
 const errors = [];
 if (_fs2.default.existsSync(_path2.default.join(_nullishCoalesce(config.workspaceRoot, () => ( "./")), "package-lock.json"))) {
 errors.push('Invalid occurrence of "package-lock.json" file. Please remove it and use only "pnpm-lock.yaml"');
@@ -58,26 +58,26 @@ void (async () => {
 errors.push('The "pnpm-lock.yaml" does not exist or cannot be read');
 }
 if (errors.length > 0) {
-
+_chunkWS6FE5KDcjs.writeError.call(void 0, "\u274C Lock file validation failed", config);
 for (const error of errors) {
 console.error(error);
 }
-
+_chunkWS6FE5KDcjs.exitWithError.call(void 0, config);
 }
-
+_chunkWS6FE5KDcjs.writeSuccess.call(void 0, "Lock file is valid \u2705", config);
 _chunkUAKVQGZUcjs.run.call(void 0, config, "git lfs pre-push origin");
 try {
 _chunkUAKVQGZUcjs.run.call(void 0, config, "git-lfs version");
 } catch (error) {
-
+_chunkWS6FE5KDcjs.writeError.call(void 0, `This repository is configured for Git LFS but 'git-lfs' was not found on your path. If you no longer wish to use Git LFS, remove this hook by deleting .git/hooks/pre-push.
 Error: ${_optionalChain([error, 'optionalAccess', _4 => _4.message])}`, config);
-
+_chunkWS6FE5KDcjs.exitWithError.call(void 0, config);
 }
 _chunkUAKVQGZUcjs.run.call(void 0, config, "git lfs pre-push origin");
-
+_chunkWS6FE5KDcjs.exitWithSuccess.call(void 0, config);
 } catch (error) {
-
-
+_chunkWS6FE5KDcjs.writeFatal.call(void 0, `A fatal error occurred while running the program: ${error.message}`, config);
+_chunkWS6FE5KDcjs.exitWithError.call(void 0, config);
 process.exit(1);
 }
 })();
package/bin/pre-push.js
CHANGED
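The lock-file validation in the pre-push diff above can be sketched standalone as follows. Assumptions are flagged in the comments: the current directory stands in for config.workspaceRoot (whose fallback in the hook is "./"), console/process calls replace the chunk's logging and exit helpers, and the checks between the package-lock.json test and the pnpm-lock.yaml test are not visible in this hunk, so the yarn.lock rejection shown here is a guess.

```ts
import * as fs from "node:fs";
import * as path from "node:path";

// The real hook uses config.workspaceRoot (falling back to "./"); cwd stands in here.
const workspaceRoot = process.cwd();
const errors: string[] = [];

if (fs.existsSync(path.join(workspaceRoot, "package-lock.json"))) {
  errors.push('Invalid occurrence of "package-lock.json" file. Please remove it and use only "pnpm-lock.yaml"');
}
// Assumption: the elided middle of the hunk likely rejects other lock files the same way.
if (fs.existsSync(path.join(workspaceRoot, "yarn.lock"))) {
  errors.push('Invalid occurrence of "yarn.lock" file. Please remove it and use only "pnpm-lock.yaml"');
}
if (!fs.existsSync(path.join(workspaceRoot, "pnpm-lock.yaml"))) {
  errors.push('The "pnpm-lock.yaml" does not exist or cannot be read');
}

if (errors.length > 0) {
  console.error("❌ Lock file validation failed");
  for (const error of errors) {
    console.error(error);
  }
  process.exit(1);
}
console.log("Lock file is valid ✅");
```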
package/bin/prepare.cjs
CHANGED
@@ -9,7 +9,7 @@ var _chunkUAKVQGZUcjs = require('./chunk-UAKVQGZU.cjs');



-var
+var _chunkWS6FE5KDcjs = require('./chunk-WS6FE5KD.cjs');
 require('./chunk-JMRHG3KS.cjs');
 require('./chunk-NHO7HSOE.cjs');
 require('./chunk-WOZSAFMN.cjs');
@@ -23,17 +23,17 @@ var _chunkEM6PLOYYcjs = require('./chunk-EM6PLOYY.cjs');
 // bin/prepare.ts
 _chunkEM6PLOYYcjs.init_cjs_shims.call(void 0, );
 void (async () => {
-const config = await
+const config = await _chunkWS6FE5KDcjs.getConfig.call(void 0, );
 try {
-
-
+_chunkWS6FE5KDcjs.handleProcess.call(void 0, config);
+_chunkWS6FE5KDcjs.writeInfo.call(void 0, "Running prepare hook...", config);
 if (!process.env.CI && !process.env.STORM_CI) {
 _chunkUAKVQGZUcjs.run.call(void 0, config, "lefthook install");
 }
-
+_chunkWS6FE5KDcjs.exitWithSuccess.call(void 0, config);
 } catch (error) {
-
-
+_chunkWS6FE5KDcjs.writeFatal.call(void 0, `A fatal error occurred while running the program: ${error.message}`, config);
+_chunkWS6FE5KDcjs.exitWithError.call(void 0, config);
 process.exit(1);
 }
 })();
package/bin/prepare.js
CHANGED
package/bin/version-warning.cjs
CHANGED
@@ -7,7 +7,7 @@ var _chunkBXSXKKIZcjs = require('./chunk-BXSXKKIZ.cjs');



-var
+var _chunkWS6FE5KDcjs = require('./chunk-WS6FE5KD.cjs');
 require('./chunk-JMRHG3KS.cjs');
 require('./chunk-NHO7HSOE.cjs');
 require('./chunk-WOZSAFMN.cjs');
@@ -21,13 +21,13 @@ var _chunkEM6PLOYYcjs = require('./chunk-EM6PLOYY.cjs');
 // bin/version-warning.ts
 _chunkEM6PLOYYcjs.init_cjs_shims.call(void 0, );
 void (async () => {
-const config = await
+const config = await _chunkWS6FE5KDcjs.getConfig.call(void 0, );
 try {
-
+_chunkWS6FE5KDcjs.handleProcess.call(void 0, config);
 _chunkBXSXKKIZcjs.checkPackageVersion.call(void 0, process.argv.slice(1));
 } catch (error) {
-
-
+_chunkWS6FE5KDcjs.writeFatal.call(void 0, `A fatal error occurred while running the program: ${error.message}`, config);
+_chunkWS6FE5KDcjs.exitWithError.call(void 0, config);
 process.exit(1);
 }
 })();
package/bin/version-warning.js
CHANGED