@willbooster/shared-lib-node 2.5.2 → 2.7.0

This diff shows the contents of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between those versions as they appear in their public registries.
package/dist/cjs/env.cjs CHANGED
@@ -1,2 +1,2 @@
- "use strict";var e=require("node:path"),t=require("dotenv");exports.loadEnvironmentVariables=function(n,o){let r=(n.env??[]).map((e=>e.toString()));const s=n.cascadeNodeEnv?process.env.NODE_ENV??"":n.cascadeEnv;"string"==typeof s&&(0===r.length&&r.push(".env"),r=r.flatMap((e=>s?[`${e}.${s}.local`,`${e}.local`,`${e}.${s}`,e]:[`${e}.local`,e]))),n.verbose&&console.info("Loading env files:",r);let a={};for(const n of r)a={...t.config({path:e.join(o,n)}).parsed,...a};return a},exports.removeNpmAndYarnEnvironmentVariables=function(e){for(const t of Object.keys(e)){const n=t.toLowerCase();(n.startsWith("npm_")||n.startsWith("yarn_")||n.startsWith("berry_"))&&delete e[t]}};
+ "use strict";var e=require("node:path"),n=require("dotenv");exports.loadEnvironmentVariables=function(o,t){let a=(o.env??[]).map((e=>e.toString()));const r=o.cascadeNodeEnv?process.env.NODE_ENV||"development":o.cascadeEnv;"string"==typeof r&&(0===a.length&&a.push(".env"),a=a.flatMap((e=>r?[`${e}.${r}.local`,`${e}.local`,`${e}.${r}`,e]:[`${e}.local`,e]))),o.verbose&&console.info("Loading env files:",a);let s={};for(const o of a)s={...n.config({path:e.join(t,o)}).parsed,...s};return s},exports.removeNpmAndYarnEnvironmentVariables=function(e){for(const n of Object.keys(e)){const o=n.toLowerCase();(o.startsWith("npm_")||o.startsWith("yarn_")||o.startsWith("berry_"))&&delete e[n]}},exports.yargsOptionsBuilderForEnv={env:{description:".env files to be loaded.",type:"array"},"cascade-env":{description:"environment to load cascading .env files (e.g., `.env`, `.env.<environment>`, `.env.local` and `.env.<environment>.local`)",type:"string"},"cascade-node-env":{description:'environment to load cascading .env files (e.g., `.env`, `.env.<NODE_ENV>`, `.env.local` and `.env.<NODE_ENV>.local`). If NODE_ENV is falsy, "development" is applied. Preferred over `cascade`.',type:"boolean"}};
  //# sourceMappingURL=env.cjs.map
@@ -1 +1 @@
- {"version":3,"file":"env.cjs","sources":["../../src/env.ts"],"sourcesContent":["import path from 'node:path';\n\nimport { config } from 'dotenv';\n\ninterface Options {\n env?: (string | number)[];\n cascadeEnv?: string;\n cascadeNodeEnv?: boolean;\n verbose?: boolean;\n}\n\n/**\n * This function loads environment variables from `.env` files.\n * */\nexport function loadEnvironmentVariables(argv: Options, cwd: string): Record<string, string> {\n let envPaths = (argv.env ?? []).map((envPath) => envPath.toString());\n const cascade = argv.cascadeNodeEnv ? process.env.NODE_ENV ?? '' : argv.cascadeEnv;\n if (typeof cascade === 'string') {\n if (envPaths.length === 0) envPaths.push('.env');\n envPaths = envPaths.flatMap((envPath) =>\n cascade\n ? [`${envPath}.${cascade}.local`, `${envPath}.local`, `${envPath}.${cascade}`, envPath]\n : [`${envPath}.local`, envPath]\n );\n }\n if (argv.verbose) {\n console.info('Loading env files:', envPaths);\n }\n\n let envVars = {};\n for (const envPath of envPaths) {\n envVars = { ...config({ path: path.join(cwd, envPath) }).parsed, ...envVars };\n }\n return envVars;\n}\n\n/**\n * This function removes environment variables related to npm and yarn from the given environment variables.\n * */\nexport function removeNpmAndYarnEnvironmentVariables(envVars: Record<string, unknown>): void {\n // Remove npm & yarn environment variables from process.env\n for (const key of Object.keys(envVars)) {\n const lowerKey = key.toLowerCase();\n if (lowerKey.startsWith('npm_') || lowerKey.startsWith('yarn_') || lowerKey.startsWith('berry_')) {\n delete envVars[key];\n }\n }\n}\n"],"names":["argv","cwd","envPaths","env","map","envPath","toString","cascade","cascadeNodeEnv","process","NODE_ENV","cascadeEnv","length","push","flatMap","verbose","console","info","envVars","config","path","join","parsed","key","Object","keys","lowerKey","toLowerCase","startsWith"],"mappings":"6FAcO,SAAkCA,EAAeC,GACtD,IAAIC,GAAYF,EAAKG,KAAO,IAAIC,KAAKC,GAAYA,EAAQC,aACzD,MAAMC,EAAUP,EAAKQ,eAAiBC,QAAQN,IAAIO,UAAY,GAAKV,EAAKW,WACjD,iBAAZJ,IACe,IAApBL,EAASU,QAAcV,EAASW,KAAK,QACzCX,EAAWA,EAASY,SAAST,GAC3BE,EACI,CAAE,GAAEF,KAAWE,UAAkB,GAAEF,UAAkB,GAAEA,KAAWE,IAAWF,GAC7E,CAAE,GAAEA,UAAiBA,MAGzBL,EAAKe,SACPC,QAAQC,KAAK,qBAAsBf,GAGrC,IAAIgB,EAAU,CAAA,EACd,IAAK,MAAMb,KAAWH,EACpBgB,EAAU,IAAKC,SAAO,CAAEC,KAAMA,EAAKC,KAAKpB,EAAKI,KAAYiB,UAAWJ,GAEtE,OAAOA,CACT,+CAKO,SAA8CA,GAEnD,IAAK,MAAMK,KAAOC,OAAOC,KAAKP,GAAU,CACtC,MAAMQ,EAAWH,EAAII,eACjBD,EAASE,WAAW,SAAWF,EAASE,WAAW,UAAYF,EAASE,WAAW,mBAC9EV,EAAQK,EAEnB,CACF"}
+ {"version":3,"file":"env.cjs","sources":["../../src/env.ts"],"sourcesContent":["import path from 'node:path';\n\nimport { config } from 'dotenv';\n\ninterface Options {\n env?: (string | number)[];\n cascadeEnv?: string;\n cascadeNodeEnv?: boolean;\n verbose?: boolean;\n}\n\nexport const yargsOptionsBuilderForEnv = {\n env: {\n description: '.env files to be loaded.',\n type: 'array',\n },\n 'cascade-env': {\n description:\n 'environment to load cascading .env files (e.g., `.env`, `.env.<environment>`, `.env.local` and `.env.<environment>.local`)',\n type: 'string',\n },\n 'cascade-node-env': {\n description:\n 'environment to load cascading .env files (e.g., `.env`, `.env.<NODE_ENV>`, `.env.local` and `.env.<NODE_ENV>.local`). If NODE_ENV is falsy, \"development\" is applied. Preferred over `cascade`.',\n type: 'boolean',\n },\n} as const;\n\n/**\n * This function loads environment variables from `.env` files.\n * */\nexport function loadEnvironmentVariables(argv: Options, cwd: string): Record<string, string> {\n let envPaths = (argv.env ?? []).map((envPath) => envPath.toString());\n const cascade = argv.cascadeNodeEnv ? process.env.NODE_ENV || 'development' : argv.cascadeEnv;\n if (typeof cascade === 'string') {\n if (envPaths.length === 0) envPaths.push('.env');\n envPaths = envPaths.flatMap((envPath) =>\n cascade\n ? [`${envPath}.${cascade}.local`, `${envPath}.local`, `${envPath}.${cascade}`, envPath]\n : [`${envPath}.local`, envPath]\n );\n }\n if (argv.verbose) {\n console.info('Loading env files:', envPaths);\n }\n\n let envVars = {};\n for (const envPath of envPaths) {\n envVars = { ...config({ path: path.join(cwd, envPath) }).parsed, ...envVars };\n }\n return envVars;\n}\n\n/**\n * This function removes environment variables related to npm and yarn from the given environment variables.\n * */\nexport function removeNpmAndYarnEnvironmentVariables(envVars: Record<string, unknown>): void {\n // Remove npm & yarn environment variables from process.env\n for (const key of Object.keys(envVars)) {\n const lowerKey = key.toLowerCase();\n if (lowerKey.startsWith('npm_') || lowerKey.startsWith('yarn_') || lowerKey.startsWith('berry_')) {\n delete envVars[key];\n }\n }\n}\n"],"names":["argv","cwd","envPaths","env","map","envPath","toString","cascade","cascadeNodeEnv","process","NODE_ENV","cascadeEnv","length","push","flatMap","verbose","console","info","envVars","config","path","join","parsed","key","Object","keys","lowerKey","toLowerCase","startsWith","description","type"],"mappings":"6FA+BO,SAAkCA,EAAeC,GACtD,IAAIC,GAAYF,EAAKG,KAAO,IAAIC,KAAKC,GAAYA,EAAQC,aACzD,MAAMC,EAAUP,EAAKQ,eAAiBC,QAAQN,IAAIO,UAAY,cAAgBV,EAAKW,WAC5D,iBAAZJ,IACe,IAApBL,EAASU,QAAcV,EAASW,KAAK,QACzCX,EAAWA,EAASY,SAAST,GAC3BE,EACI,CAAE,GAAEF,KAAWE,UAAkB,GAAEF,UAAkB,GAAEA,KAAWE,IAAWF,GAC7E,CAAE,GAAEA,UAAiBA,MAGzBL,EAAKe,SACPC,QAAQC,KAAK,qBAAsBf,GAGrC,IAAIgB,EAAU,CAAA,EACd,IAAK,MAAMb,KAAWH,EACpBgB,EAAU,IAAKC,SAAO,CAAEC,KAAMA,EAAKC,KAAKpB,EAAKI,KAAYiB,UAAWJ,GAEtE,OAAOA,CACT,+CAKO,SAA8CA,GAEnD,IAAK,MAAMK,KAAOC,OAAOC,KAAKP,GAAU,CACtC,MAAMQ,EAAWH,EAAII,eACjBD,EAASE,WAAW,SAAWF,EAASE,WAAW,UAAYF,EAASE,WAAW,mBAC9EV,EAAQK,EAEnB,CACF,oCArDyC,CACvCpB,IAAK,CACH0B,YAAa,2BACbC,KAAM,SAER,cAAe,CACbD,YACE,6HACFC,KAAM,UAER,mBAAoB,CAClBD,YACE,kMACFC,KAAM"}
@@ -4,6 +4,20 @@ interface Options {
  cascadeNodeEnv?: boolean;
  verbose?: boolean;
  }
+ declare const yargsOptionsBuilderForEnv: {
+ readonly env: {
+ readonly description: ".env files to be loaded.";
+ readonly type: "array";
+ };
+ readonly 'cascade-env': {
+ readonly description: "environment to load cascading .env files (e.g., `.env`, `.env.<environment>`, `.env.local` and `.env.<environment>.local`)";
+ readonly type: "string";
+ };
+ readonly 'cascade-node-env': {
+ readonly description: "environment to load cascading .env files (e.g., `.env`, `.env.<NODE_ENV>`, `.env.local` and `.env.<NODE_ENV>.local`). If NODE_ENV is falsy, \"development\" is applied. Preferred over `cascade`.";
+ readonly type: "boolean";
+ };
+ };
  /**
  * This function loads environment variables from `.env` files.
  * */
@@ -12,4 +26,4 @@ declare function loadEnvironmentVariables(argv: Options, cwd: string): Record<st
  * This function removes environment variables related to npm and yarn from the given environment variables.
  * */
  declare function removeNpmAndYarnEnvironmentVariables(envVars: Record<string, unknown>): void;
- export { loadEnvironmentVariables, removeNpmAndYarnEnvironmentVariables };
+ export { yargsOptionsBuilderForEnv, loadEnvironmentVariables, removeNpmAndYarnEnvironmentVariables };
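The newly exported `yargsOptionsBuilderForEnv` declared above is a plain option map meant to be passed to a yargs builder, so a CLI can accept `--env`, `--cascade-env`, and `--cascade-node-env` and hand the parsed argv to `loadEnvironmentVariables`. A minimal sketch, assuming yargs is installed separately and both imports resolve from the package entry point (neither is confirmed by this diff):

// Sketch only: wiring the exported option map into a CLI; yargs is an assumed dependency.
import yargs from 'yargs';
import { hideBin } from 'yargs/helpers';

import { loadEnvironmentVariables, yargsOptionsBuilderForEnv } from '@willbooster/shared-lib-node';

const argv = await yargs(hideBin(process.argv)).options(yargsOptionsBuilderForEnv).parseAsync();
// yargs camel-cases the kebab-case keys, so argv carries cascadeEnv / cascadeNodeEnv,
// matching the Options interface expected by loadEnvironmentVariables.
const envVars = loadEnvironmentVariables(argv, process.cwd());
console.info('Loaded variables:', Object.keys(envVars));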
@@ -1 +1 @@
- {"version":3,"file":"hash.cjs","sources":["../../src/hash.ts"],"sourcesContent":["import crypto from 'node:crypto';\nimport fs from 'node:fs';\nimport path from 'node:path';\n\n/**\n * Calculate hash from files.\n * @param paths Paths to the files or directories.\n * @returns Hash string.\n */\nexport async function calculateHashFromFiles(...paths: string[]): Promise<string> {\n const hash = crypto.createHash('sha512');\n for (const fileOrDirPath of paths.sort()) {\n const stat = await fs.promises.stat(fileOrDirPath);\n if (stat.isDirectory()) {\n // Get all files in the directory\n const dirents = await fs.promises.readdir(fileOrDirPath, { withFileTypes: true, recursive: true });\n for (const dirent of dirents.sort((d1, d2) => d1.name.localeCompare(d2.name))) {\n if (dirent.isFile()) {\n // Node.js 18.17.0 or later has `dirent.path`\n hash.update(\n await fs.promises.readFile(path.join((dirent as unknown as { path: string }).path, dirent.name), 'utf8')\n );\n }\n }\n } else if (stat.isFile()) {\n hash.update(await fs.promises.readFile(fileOrDirPath, 'utf8'));\n }\n }\n return hash.digest('hex');\n}\n\n/**\n * Update hash file if the hash is different from the current one.\n * @param hashFilePath Path to the hash file.\n * @param paths Paths to the files or directories.\n * @returns Whether the hash file was updated.\n */\nexport async function updateHashFromFiles(hashFilePath: string, ...paths: string[]): Promise<boolean> {\n let oldHash = '';\n try {\n oldHash = await fs.promises.readFile(hashFilePath, 'utf8');\n } catch {\n // do nothing\n }\n const newHash = await calculateHashFromFiles(...paths);\n if (oldHash === newHash) return false;\n\n await fs.promises.writeFile(hashFilePath, newHash, 'utf8');\n return true;\n}\n"],"names":["async","calculateHashFromFiles","paths","hash","crypto","createHash","fileOrDirPath","sort","stat","fs","promises","isDirectory","dirents","readdir","withFileTypes","recursive","dirent","d1","d2","name","localeCompare","isFile","update","readFile","path","join","digest","hashFilePath","oldHash","newHash","writeFile"],"mappings":"sFASOA,eAAeC,KAA0BC,GAC9C,MAAMC,EAAOC,EAAOC,WAAW,UAC/B,IAAK,MAAMC,KAAiBJ,EAAMK,OAAQ,CACxC,MAAMC,QAAaC,EAAGC,SAASF,KAAKF,GACpC,GAAIE,EAAKG,cAAe,CAEtB,MAAMC,QAAgBH,EAAGC,SAASG,QAAQP,EAAe,CAAEQ,eAAe,EAAMC,WAAW,IAC3F,IAAK,MAAMC,KAAUJ,EAAQL,MAAK,CAACU,EAAIC,IAAOD,EAAGE,KAAKC,cAAcF,EAAGC,QACjEH,EAAOK,UAETlB,EAAKmB,aACGb,EAAGC,SAASa,SAASC,EAAKC,KAAMT,EAAuCQ,KAAMR,EAAOG,MAAO,QAIzG,MAAWX,EAAKa,UACdlB,EAAKmB,aAAab,EAAGC,SAASa,SAASjB,EAAe,QAE1D,CACA,OAAOH,EAAKuB,OAAO,MACrB,8DAQO1B,eAAmC2B,KAAyBzB,GACjE,IAAI0B,EAAU,GACd,IACEA,QAAgBnB,EAAGC,SAASa,SAASI,EAAc,OACrD,CAAE,MACA,CAEF,MAAME,QAAgB5B,KAA0BC,GAChD,OAAI0B,IAAYC,UAEVpB,EAAGC,SAASoB,UAAUH,EAAcE,EAAS,SAC5C,EACT"}
+ {"version":3,"file":"hash.cjs","sources":["../../src/hash.ts"],"sourcesContent":["import crypto from 'node:crypto';\nimport fs from 'node:fs';\nimport path from 'node:path';\n\n/**\n * Calculate hash from files.\n * @param paths Paths to the files or directories.\n * @returns Hash string.\n */\nexport async function calculateHashFromFiles(...paths: string[]): Promise<string> {\n const hash = crypto.createHash('sha512');\n for (const fileOrDirPath of paths.sort()) {\n const stat = await fs.promises.stat(fileOrDirPath);\n if (stat.isDirectory()) {\n // Get all files in the directory\n const dirents = await fs.promises.readdir(fileOrDirPath, { withFileTypes: true, recursive: true });\n for (const dirent of dirents.sort((d1, d2) => d1.name.localeCompare(d2.name))) {\n if (dirent.isFile()) {\n // Node.js 18.17.0 or later has `dirent.path`\n hash.update(\n await fs.promises.readFile(\n path.join((dirent as unknown as Record<'path', string>).path, dirent.name),\n 'utf8'\n )\n );\n }\n }\n } else if (stat.isFile()) {\n hash.update(await fs.promises.readFile(fileOrDirPath, 'utf8'));\n }\n }\n return hash.digest('hex');\n}\n\n/**\n * Update hash file if the hash is different from the current one.\n * @param hashFilePath Path to the hash file.\n * @param paths Paths to the files or directories.\n * @returns Whether the hash file was updated.\n */\nexport async function updateHashFromFiles(hashFilePath: string, ...paths: string[]): Promise<boolean> {\n let oldHash = '';\n try {\n oldHash = await fs.promises.readFile(hashFilePath, 'utf8');\n } catch {\n // do nothing\n }\n const newHash = await calculateHashFromFiles(...paths);\n if (oldHash === newHash) return false;\n\n await fs.promises.writeFile(hashFilePath, newHash, 'utf8');\n return true;\n}\n"],"names":["async","calculateHashFromFiles","paths","hash","crypto","createHash","fileOrDirPath","sort","stat","fs","promises","isDirectory","dirents","readdir","withFileTypes","recursive","dirent","d1","d2","name","localeCompare","isFile","update","readFile","path","join","digest","hashFilePath","oldHash","newHash","writeFile"],"mappings":"sFASOA,eAAeC,KAA0BC,GAC9C,MAAMC,EAAOC,EAAOC,WAAW,UAC/B,IAAK,MAAMC,KAAiBJ,EAAMK,OAAQ,CACxC,MAAMC,QAAaC,EAAGC,SAASF,KAAKF,GACpC,GAAIE,EAAKG,cAAe,CAEtB,MAAMC,QAAgBH,EAAGC,SAASG,QAAQP,EAAe,CAAEQ,eAAe,EAAMC,WAAW,IAC3F,IAAK,MAAMC,KAAUJ,EAAQL,MAAK,CAACU,EAAIC,IAAOD,EAAGE,KAAKC,cAAcF,EAAGC,QACjEH,EAAOK,UAETlB,EAAKmB,aACGb,EAAGC,SAASa,SAChBC,EAAKC,KAAMT,EAA6CQ,KAAMR,EAAOG,MACrE,QAKV,MAAWX,EAAKa,UACdlB,EAAKmB,aAAab,EAAGC,SAASa,SAASjB,EAAe,QAE1D,CACA,OAAOH,EAAKuB,OAAO,MACrB,8DAQO1B,eAAmC2B,KAAyBzB,GACjE,IAAI0B,EAAU,GACd,IACEA,QAAgBnB,EAAGC,SAASa,SAASI,EAAc,OACrD,CAAE,MACA,CAEF,MAAME,QAAgB5B,KAA0BC,GAChD,OAAI0B,IAAYC,UAEVpB,EAAGC,SAASoB,UAAUH,EAAcE,EAAS,SAC5C,EACT"}
@@ -1,2 +1,2 @@
- "use strict";var e=require("./env.cjs"),s=require("./exists.cjs"),r=require("./hash.cjs"),a=require("./spawn.cjs");exports.loadEnvironmentVariables=e.loadEnvironmentVariables,exports.removeNpmAndYarnEnvironmentVariables=e.removeNpmAndYarnEnvironmentVariables,exports.existsAsync=s.existsAsync,exports.calculateHashFromFiles=r.calculateHashFromFiles,exports.updateHashFromFiles=r.updateHashFromFiles,exports.spawnAsync=a.spawnAsync;
+ "use strict";var s=require("./env.cjs"),e=require("./exists.cjs"),r=require("./hash.cjs"),a=require("./spawn.cjs");exports.loadEnvironmentVariables=s.loadEnvironmentVariables,exports.removeNpmAndYarnEnvironmentVariables=s.removeNpmAndYarnEnvironmentVariables,exports.yargsOptionsBuilderForEnv=s.yargsOptionsBuilderForEnv,exports.existsAsync=e.existsAsync,exports.calculateHashFromFiles=r.calculateHashFromFiles,exports.updateHashFromFiles=r.updateHashFromFiles,exports.spawnAsync=a.spawnAsync;
  //# sourceMappingURL=index.cjs.map
@@ -1,4 +1,4 @@
- export { loadEnvironmentVariables, removeNpmAndYarnEnvironmentVariables } from "./env.js";
+ export { loadEnvironmentVariables, removeNpmAndYarnEnvironmentVariables, yargsOptionsBuilderForEnv } from "./env.js";
  export { existsAsync } from "./exists.js";
  export { calculateHashFromFiles, updateHashFromFiles } from "./hash.js";
  export { spawnAsync } from "./spawn.js";
package/dist/esm/env.d.ts CHANGED
@@ -4,6 +4,20 @@ interface Options {
  cascadeNodeEnv?: boolean;
  verbose?: boolean;
  }
+ declare const yargsOptionsBuilderForEnv: {
+ readonly env: {
+ readonly description: ".env files to be loaded.";
+ readonly type: "array";
+ };
+ readonly 'cascade-env': {
+ readonly description: "environment to load cascading .env files (e.g., `.env`, `.env.<environment>`, `.env.local` and `.env.<environment>.local`)";
+ readonly type: "string";
+ };
+ readonly 'cascade-node-env': {
+ readonly description: "environment to load cascading .env files (e.g., `.env`, `.env.<NODE_ENV>`, `.env.local` and `.env.<NODE_ENV>.local`). If NODE_ENV is falsy, \"development\" is applied. Preferred over `cascade`.";
+ readonly type: "boolean";
+ };
+ };
  /**
  * This function loads environment variables from `.env` files.
  * */
@@ -12,4 +26,4 @@ declare function loadEnvironmentVariables(argv: Options, cwd: string): Record<st
  * This function removes environment variables related to npm and yarn from the given environment variables.
  * */
  declare function removeNpmAndYarnEnvironmentVariables(envVars: Record<string, unknown>): void;
- export { loadEnvironmentVariables, removeNpmAndYarnEnvironmentVariables };
+ export { yargsOptionsBuilderForEnv, loadEnvironmentVariables, removeNpmAndYarnEnvironmentVariables };
package/dist/esm/env.js CHANGED
@@ -1,2 +1,2 @@
- import t from"node:path";import{config as o}from"dotenv";function e(e,n){let s=(e.env??[]).map((t=>t.toString()));const r=e.cascadeNodeEnv?process.env.NODE_ENV??"":e.cascadeEnv;"string"==typeof r&&(0===s.length&&s.push(".env"),s=s.flatMap((t=>r?[`${t}.${r}.local`,`${t}.local`,`${t}.${r}`,t]:[`${t}.local`,t]))),e.verbose&&console.info("Loading env files:",s);let a={};for(const e of s)a={...o({path:t.join(n,e)}).parsed,...a};return a}function n(t){for(const o of Object.keys(t)){const e=o.toLowerCase();(e.startsWith("npm_")||e.startsWith("yarn_")||e.startsWith("berry_"))&&delete t[o]}}export{e as loadEnvironmentVariables,n as removeNpmAndYarnEnvironmentVariables};
+ import e from"node:path";import{config as n}from"dotenv";const o={env:{description:".env files to be loaded.",type:"array"},"cascade-env":{description:"environment to load cascading .env files (e.g., `.env`, `.env.<environment>`, `.env.local` and `.env.<environment>.local`)",type:"string"},"cascade-node-env":{description:'environment to load cascading .env files (e.g., `.env`, `.env.<NODE_ENV>`, `.env.local` and `.env.<NODE_ENV>.local`). If NODE_ENV is falsy, "development" is applied. Preferred over `cascade`.',type:"boolean"}};function t(o,t){let a=(o.env??[]).map((e=>e.toString()));const s=o.cascadeNodeEnv?process.env.NODE_ENV||"development":o.cascadeEnv;"string"==typeof s&&(0===a.length&&a.push(".env"),a=a.flatMap((e=>s?[`${e}.${s}.local`,`${e}.local`,`${e}.${s}`,e]:[`${e}.local`,e]))),o.verbose&&console.info("Loading env files:",a);let r={};for(const o of a)r={...n({path:e.join(t,o)}).parsed,...r};return r}function a(e){for(const n of Object.keys(e)){const o=n.toLowerCase();(o.startsWith("npm_")||o.startsWith("yarn_")||o.startsWith("berry_"))&&delete e[n]}}export{t as loadEnvironmentVariables,a as removeNpmAndYarnEnvironmentVariables,o as yargsOptionsBuilderForEnv};
  //# sourceMappingURL=env.js.map
@@ -1 +1 @@
- {"version":3,"file":"env.js","sources":["../../src/env.ts"],"sourcesContent":["import path from 'node:path';\n\nimport { config } from 'dotenv';\n\ninterface Options {\n env?: (string | number)[];\n cascadeEnv?: string;\n cascadeNodeEnv?: boolean;\n verbose?: boolean;\n}\n\n/**\n * This function loads environment variables from `.env` files.\n * */\nexport function loadEnvironmentVariables(argv: Options, cwd: string): Record<string, string> {\n let envPaths = (argv.env ?? []).map((envPath) => envPath.toString());\n const cascade = argv.cascadeNodeEnv ? process.env.NODE_ENV ?? '' : argv.cascadeEnv;\n if (typeof cascade === 'string') {\n if (envPaths.length === 0) envPaths.push('.env');\n envPaths = envPaths.flatMap((envPath) =>\n cascade\n ? [`${envPath}.${cascade}.local`, `${envPath}.local`, `${envPath}.${cascade}`, envPath]\n : [`${envPath}.local`, envPath]\n );\n }\n if (argv.verbose) {\n console.info('Loading env files:', envPaths);\n }\n\n let envVars = {};\n for (const envPath of envPaths) {\n envVars = { ...config({ path: path.join(cwd, envPath) }).parsed, ...envVars };\n }\n return envVars;\n}\n\n/**\n * This function removes environment variables related to npm and yarn from the given environment variables.\n * */\nexport function removeNpmAndYarnEnvironmentVariables(envVars: Record<string, unknown>): void {\n // Remove npm & yarn environment variables from process.env\n for (const key of Object.keys(envVars)) {\n const lowerKey = key.toLowerCase();\n if (lowerKey.startsWith('npm_') || lowerKey.startsWith('yarn_') || lowerKey.startsWith('berry_')) {\n delete envVars[key];\n }\n }\n}\n"],"names":["loadEnvironmentVariables","argv","cwd","envPaths","env","map","envPath","toString","cascade","cascadeNodeEnv","process","NODE_ENV","cascadeEnv","length","push","flatMap","verbose","console","info","envVars","config","path","join","parsed","removeNpmAndYarnEnvironmentVariables","key","Object","keys","lowerKey","toLowerCase","startsWith"],"mappings":"yDAcO,SAASA,EAAyBC,EAAeC,GACtD,IAAIC,GAAYF,EAAKG,KAAO,IAAIC,KAAKC,GAAYA,EAAQC,aACzD,MAAMC,EAAUP,EAAKQ,eAAiBC,QAAQN,IAAIO,UAAY,GAAKV,EAAKW,WACjD,iBAAZJ,IACe,IAApBL,EAASU,QAAcV,EAASW,KAAK,QACzCX,EAAWA,EAASY,SAAST,GAC3BE,EACI,CAAE,GAAEF,KAAWE,UAAkB,GAAEF,UAAkB,GAAEA,KAAWE,IAAWF,GAC7E,CAAE,GAAEA,UAAiBA,MAGzBL,EAAKe,SACPC,QAAQC,KAAK,qBAAsBf,GAGrC,IAAIgB,EAAU,CAAA,EACd,IAAK,MAAMb,KAAWH,EACpBgB,EAAU,IAAKC,EAAO,CAAEC,KAAMA,EAAKC,KAAKpB,EAAKI,KAAYiB,UAAWJ,GAEtE,OAAOA,CACT,CAKO,SAASK,EAAqCL,GAEnD,IAAK,MAAMM,KAAOC,OAAOC,KAAKR,GAAU,CACtC,MAAMS,EAAWH,EAAII,eACjBD,EAASE,WAAW,SAAWF,EAASE,WAAW,UAAYF,EAASE,WAAW,mBAC9EX,EAAQM,EAEnB,CACF"}
+ {"version":3,"file":"env.js","sources":["../../src/env.ts"],"sourcesContent":["import path from 'node:path';\n\nimport { config } from 'dotenv';\n\ninterface Options {\n env?: (string | number)[];\n cascadeEnv?: string;\n cascadeNodeEnv?: boolean;\n verbose?: boolean;\n}\n\nexport const yargsOptionsBuilderForEnv = {\n env: {\n description: '.env files to be loaded.',\n type: 'array',\n },\n 'cascade-env': {\n description:\n 'environment to load cascading .env files (e.g., `.env`, `.env.<environment>`, `.env.local` and `.env.<environment>.local`)',\n type: 'string',\n },\n 'cascade-node-env': {\n description:\n 'environment to load cascading .env files (e.g., `.env`, `.env.<NODE_ENV>`, `.env.local` and `.env.<NODE_ENV>.local`). If NODE_ENV is falsy, \"development\" is applied. Preferred over `cascade`.',\n type: 'boolean',\n },\n} as const;\n\n/**\n * This function loads environment variables from `.env` files.\n * */\nexport function loadEnvironmentVariables(argv: Options, cwd: string): Record<string, string> {\n let envPaths = (argv.env ?? []).map((envPath) => envPath.toString());\n const cascade = argv.cascadeNodeEnv ? process.env.NODE_ENV || 'development' : argv.cascadeEnv;\n if (typeof cascade === 'string') {\n if (envPaths.length === 0) envPaths.push('.env');\n envPaths = envPaths.flatMap((envPath) =>\n cascade\n ? [`${envPath}.${cascade}.local`, `${envPath}.local`, `${envPath}.${cascade}`, envPath]\n : [`${envPath}.local`, envPath]\n );\n }\n if (argv.verbose) {\n console.info('Loading env files:', envPaths);\n }\n\n let envVars = {};\n for (const envPath of envPaths) {\n envVars = { ...config({ path: path.join(cwd, envPath) }).parsed, ...envVars };\n }\n return envVars;\n}\n\n/**\n * This function removes environment variables related to npm and yarn from the given environment variables.\n * */\nexport function removeNpmAndYarnEnvironmentVariables(envVars: Record<string, unknown>): void {\n // Remove npm & yarn environment variables from process.env\n for (const key of Object.keys(envVars)) {\n const lowerKey = key.toLowerCase();\n if (lowerKey.startsWith('npm_') || lowerKey.startsWith('yarn_') || lowerKey.startsWith('berry_')) {\n delete envVars[key];\n }\n }\n}\n"],"names":["yargsOptionsBuilderForEnv","env","description","type","loadEnvironmentVariables","argv","cwd","envPaths","map","envPath","toString","cascade","cascadeNodeEnv","process","NODE_ENV","cascadeEnv","length","push","flatMap","verbose","console","info","envVars","config","path","join","parsed","removeNpmAndYarnEnvironmentVariables","key","Object","keys","lowerKey","toLowerCase","startsWith"],"mappings":"yDAWO,MAAMA,EAA4B,CACvCC,IAAK,CACHC,YAAa,2BACbC,KAAM,SAER,cAAe,CACbD,YACE,6HACFC,KAAM,UAER,mBAAoB,CAClBD,YACE,kMACFC,KAAM,YAOH,SAASC,EAAyBC,EAAeC,GACtD,IAAIC,GAAYF,EAAKJ,KAAO,IAAIO,KAAKC,GAAYA,EAAQC,aACzD,MAAMC,EAAUN,EAAKO,eAAiBC,QAAQZ,IAAIa,UAAY,cAAgBT,EAAKU,WAC5D,iBAAZJ,IACe,IAApBJ,EAASS,QAAcT,EAASU,KAAK,QACzCV,EAAWA,EAASW,SAAST,GAC3BE,EACI,CAAE,GAAEF,KAAWE,UAAkB,GAAEF,UAAkB,GAAEA,KAAWE,IAAWF,GAC7E,CAAE,GAAEA,UAAiBA,MAGzBJ,EAAKc,SACPC,QAAQC,KAAK,qBAAsBd,GAGrC,IAAIe,EAAU,CAAA,EACd,IAAK,MAAMb,KAAWF,EACpBe,EAAU,IAAKC,EAAO,CAAEC,KAAMA,EAAKC,KAAKnB,EAAKG,KAAYiB,UAAWJ,GAEtE,OAAOA,CACT,CAKO,SAASK,EAAqCL,GAEnD,IAAK,MAAMM,KAAOC,OAAOC,KAAKR,GAAU,CACtC,MAAMS,EAAWH,EAAII,eACjBD,EAASE,WAAW,SAAWF,EAASE,WAAW,UAAYF,EAASE,WAAW,mBAC9EX,EAAQM,EAEnB,CACF"}
@@ -1 +1 @@
- {"version":3,"file":"hash.js","sources":["../../src/hash.ts"],"sourcesContent":["import crypto from 'node:crypto';\nimport fs from 'node:fs';\nimport path from 'node:path';\n\n/**\n * Calculate hash from files.\n * @param paths Paths to the files or directories.\n * @returns Hash string.\n */\nexport async function calculateHashFromFiles(...paths: string[]): Promise<string> {\n const hash = crypto.createHash('sha512');\n for (const fileOrDirPath of paths.sort()) {\n const stat = await fs.promises.stat(fileOrDirPath);\n if (stat.isDirectory()) {\n // Get all files in the directory\n const dirents = await fs.promises.readdir(fileOrDirPath, { withFileTypes: true, recursive: true });\n for (const dirent of dirents.sort((d1, d2) => d1.name.localeCompare(d2.name))) {\n if (dirent.isFile()) {\n // Node.js 18.17.0 or later has `dirent.path`\n hash.update(\n await fs.promises.readFile(path.join((dirent as unknown as { path: string }).path, dirent.name), 'utf8')\n );\n }\n }\n } else if (stat.isFile()) {\n hash.update(await fs.promises.readFile(fileOrDirPath, 'utf8'));\n }\n }\n return hash.digest('hex');\n}\n\n/**\n * Update hash file if the hash is different from the current one.\n * @param hashFilePath Path to the hash file.\n * @param paths Paths to the files or directories.\n * @returns Whether the hash file was updated.\n */\nexport async function updateHashFromFiles(hashFilePath: string, ...paths: string[]): Promise<boolean> {\n let oldHash = '';\n try {\n oldHash = await fs.promises.readFile(hashFilePath, 'utf8');\n } catch {\n // do nothing\n }\n const newHash = await calculateHashFromFiles(...paths);\n if (oldHash === newHash) return false;\n\n await fs.promises.writeFile(hashFilePath, newHash, 'utf8');\n return true;\n}\n"],"names":["async","calculateHashFromFiles","paths","hash","crypto","createHash","fileOrDirPath","sort","stat","fs","promises","isDirectory","dirents","readdir","withFileTypes","recursive","dirent","d1","d2","name","localeCompare","isFile","update","readFile","path","join","digest","updateHashFromFiles","hashFilePath","oldHash","newHash","writeFile"],"mappings":"2EASOA,eAAeC,KAA0BC,GAC9C,MAAMC,EAAOC,EAAOC,WAAW,UAC/B,IAAK,MAAMC,KAAiBJ,EAAMK,OAAQ,CACxC,MAAMC,QAAaC,EAAGC,SAASF,KAAKF,GACpC,GAAIE,EAAKG,cAAe,CAEtB,MAAMC,QAAgBH,EAAGC,SAASG,QAAQP,EAAe,CAAEQ,eAAe,EAAMC,WAAW,IAC3F,IAAK,MAAMC,KAAUJ,EAAQL,MAAK,CAACU,EAAIC,IAAOD,EAAGE,KAAKC,cAAcF,EAAGC,QACjEH,EAAOK,UAETlB,EAAKmB,aACGb,EAAGC,SAASa,SAASC,EAAKC,KAAMT,EAAuCQ,KAAMR,EAAOG,MAAO,QAIzG,MAAWX,EAAKa,UACdlB,EAAKmB,aAAab,EAAGC,SAASa,SAASjB,EAAe,QAE1D,CACA,OAAOH,EAAKuB,OAAO,MACrB,CAQO1B,eAAe2B,EAAoBC,KAAyB1B,GACjE,IAAI2B,EAAU,GACd,IACEA,QAAgBpB,EAAGC,SAASa,SAASK,EAAc,OACrD,CAAE,MACA,CAEF,MAAME,QAAgB7B,KAA0BC,GAChD,OAAI2B,IAAYC,UAEVrB,EAAGC,SAASqB,UAAUH,EAAcE,EAAS,SAC5C,EACT"}
+ {"version":3,"file":"hash.js","sources":["../../src/hash.ts"],"sourcesContent":["import crypto from 'node:crypto';\nimport fs from 'node:fs';\nimport path from 'node:path';\n\n/**\n * Calculate hash from files.\n * @param paths Paths to the files or directories.\n * @returns Hash string.\n */\nexport async function calculateHashFromFiles(...paths: string[]): Promise<string> {\n const hash = crypto.createHash('sha512');\n for (const fileOrDirPath of paths.sort()) {\n const stat = await fs.promises.stat(fileOrDirPath);\n if (stat.isDirectory()) {\n // Get all files in the directory\n const dirents = await fs.promises.readdir(fileOrDirPath, { withFileTypes: true, recursive: true });\n for (const dirent of dirents.sort((d1, d2) => d1.name.localeCompare(d2.name))) {\n if (dirent.isFile()) {\n // Node.js 18.17.0 or later has `dirent.path`\n hash.update(\n await fs.promises.readFile(\n path.join((dirent as unknown as Record<'path', string>).path, dirent.name),\n 'utf8'\n )\n );\n }\n }\n } else if (stat.isFile()) {\n hash.update(await fs.promises.readFile(fileOrDirPath, 'utf8'));\n }\n }\n return hash.digest('hex');\n}\n\n/**\n * Update hash file if the hash is different from the current one.\n * @param hashFilePath Path to the hash file.\n * @param paths Paths to the files or directories.\n * @returns Whether the hash file was updated.\n */\nexport async function updateHashFromFiles(hashFilePath: string, ...paths: string[]): Promise<boolean> {\n let oldHash = '';\n try {\n oldHash = await fs.promises.readFile(hashFilePath, 'utf8');\n } catch {\n // do nothing\n }\n const newHash = await calculateHashFromFiles(...paths);\n if (oldHash === newHash) return false;\n\n await fs.promises.writeFile(hashFilePath, newHash, 'utf8');\n return true;\n}\n"],"names":["async","calculateHashFromFiles","paths","hash","crypto","createHash","fileOrDirPath","sort","stat","fs","promises","isDirectory","dirents","readdir","withFileTypes","recursive","dirent","d1","d2","name","localeCompare","isFile","update","readFile","path","join","digest","updateHashFromFiles","hashFilePath","oldHash","newHash","writeFile"],"mappings":"2EASOA,eAAeC,KAA0BC,GAC9C,MAAMC,EAAOC,EAAOC,WAAW,UAC/B,IAAK,MAAMC,KAAiBJ,EAAMK,OAAQ,CACxC,MAAMC,QAAaC,EAAGC,SAASF,KAAKF,GACpC,GAAIE,EAAKG,cAAe,CAEtB,MAAMC,QAAgBH,EAAGC,SAASG,QAAQP,EAAe,CAAEQ,eAAe,EAAMC,WAAW,IAC3F,IAAK,MAAMC,KAAUJ,EAAQL,MAAK,CAACU,EAAIC,IAAOD,EAAGE,KAAKC,cAAcF,EAAGC,QACjEH,EAAOK,UAETlB,EAAKmB,aACGb,EAAGC,SAASa,SAChBC,EAAKC,KAAMT,EAA6CQ,KAAMR,EAAOG,MACrE,QAKV,MAAWX,EAAKa,UACdlB,EAAKmB,aAAab,EAAGC,SAASa,SAASjB,EAAe,QAE1D,CACA,OAAOH,EAAKuB,OAAO,MACrB,CAQO1B,eAAe2B,EAAoBC,KAAyB1B,GACjE,IAAI2B,EAAU,GACd,IACEA,QAAgBpB,EAAGC,SAASa,SAASK,EAAc,OACrD,CAAE,MACA,CAEF,MAAME,QAAgB7B,KAA0BC,GAChD,OAAI2B,IAAYC,UAEVrB,EAAGC,SAASqB,UAAUH,EAAcE,EAAS,SAC5C,EACT"}
@@ -1,4 +1,4 @@
- export { loadEnvironmentVariables, removeNpmAndYarnEnvironmentVariables } from "./env.js";
+ export { loadEnvironmentVariables, removeNpmAndYarnEnvironmentVariables, yargsOptionsBuilderForEnv } from "./env.js";
  export { existsAsync } from "./exists.js";
  export { calculateHashFromFiles, updateHashFromFiles } from "./hash.js";
  export { spawnAsync } from "./spawn.js";
package/dist/esm/index.js CHANGED
@@ -1,2 +1,2 @@
- export{loadEnvironmentVariables,removeNpmAndYarnEnvironmentVariables}from"./env.js";export{existsAsync}from"./exists.js";export{calculateHashFromFiles,updateHashFromFiles}from"./hash.js";export{spawnAsync}from"./spawn.js";
+ export{loadEnvironmentVariables,removeNpmAndYarnEnvironmentVariables,yargsOptionsBuilderForEnv}from"./env.js";export{existsAsync}from"./exists.js";export{calculateHashFromFiles,updateHashFromFiles}from"./hash.js";export{spawnAsync}from"./spawn.js";
  //# sourceMappingURL=index.js.map
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@willbooster/shared-lib-node",
- "version": "2.5.2",
+ "version": "2.7.0",
  "license": "Apache-2.0",
  "author": "WillBooster Inc.",
  "sideEffects": false,