@ant.sh/colony 0.1.0 → 0.1.1
- package/README.md +7 -1
- package/dist/cjs/dotenv.js +84 -0
- package/dist/cjs/dotenv.js.map +7 -0
- package/dist/cjs/index.js +23 -1
- package/dist/cjs/index.js.map +3 -3
- package/dist/cjs/resolver.js +2 -2
- package/dist/cjs/resolver.js.map +2 -2
- package/dist/cjs/strings.js +7 -6
- package/dist/cjs/strings.js.map +2 -2
- package/dist/esm/dotenv.js +49 -0
- package/dist/esm/dotenv.js.map +7 -0
- package/dist/esm/index.d.ts +26 -0
- package/dist/esm/index.js +20 -1
- package/dist/esm/index.js.map +3 -3
- package/dist/esm/resolver.js +2 -2
- package/dist/esm/resolver.js.map +2 -2
- package/dist/esm/strings.js +7 -6
- package/dist/esm/strings.js.map +2 -2
- package/package.json +1 -1
- package/src/dotenv.js +81 -0
- package/src/index.d.ts +26 -0
- package/src/index.js +22 -1
- package/src/resolver.js +2 -2
- package/src/strings.js +9 -6
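
The dotenv module ships in all three trees listed above (`dist/cjs`, `dist/esm`, `src`) and, per the `index.js` diffs below, its three functions are re-exported from the package root. A quick sketch of the ESM side (assuming the package's export map resolves the root import; only the CJS build is shown in this diff):

```js
// parseDotenv, loadDotenv, and loadDotenvFiles are re-exported from the root.
import { loadDotenv } from "@ant.sh/colony";

// Reads and parses a single file (assumes ./.env exists; ENOENT propagates here).
const vars = await loadDotenv(".env");
console.log(vars);
```
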
package/README.md
CHANGED

@@ -5,6 +5,8 @@
 [](https://www.npmjs.com/package/@ant.sh/colony)
 [](https://opensource.org/licenses/MIT)

+**[Try it in the Playground](https://colony.ant.sh)**
+
 ```
 # config/app.colony
 *.database.host = "localhost";
@@ -157,13 +159,17 @@ console.log(`Database: ${config.database.host}`);

 ## Documentation

-
+- **[Interactive Playground](https://colony.ant.sh)** - Try Colony in your browser
+- **[Online Docs](https://colony.ant.sh/docs.html)** - Full documentation
+- **[DOCS.md](./DOCS.md)** - Local documentation file

+Topics covered:
 - Config syntax reference
 - All operators (`=`, `:=`, `+=`, `-=`, `|=`)
 - Interpolation patterns
 - Secret providers
 - Security sandbox options
+- Framework integrations (Express, Fastify, Next.js)
 - API reference
 - TypeScript types

package/dist/cjs/dotenv.js
ADDED

@@ -0,0 +1,84 @@
+var __create = Object.create;
+var __defProp = Object.defineProperty;
+var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+var __getOwnPropNames = Object.getOwnPropertyNames;
+var __getProtoOf = Object.getPrototypeOf;
+var __hasOwnProp = Object.prototype.hasOwnProperty;
+var __export = (target, all) => {
+  for (var name in all)
+    __defProp(target, name, { get: all[name], enumerable: true });
+};
+var __copyProps = (to, from, except, desc) => {
+  if (from && typeof from === "object" || typeof from === "function") {
+    for (let key of __getOwnPropNames(from))
+      if (!__hasOwnProp.call(to, key) && key !== except)
+        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+  }
+  return to;
+};
+var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
+  // If the importer is in node compatibility mode or this is not an ESM
+  // file that has been converted to a CommonJS file using a Babel-
+  // compatible transform (i.e. "__esModule" has not been set), then set
+  // "default" to the CommonJS "module.exports" for node compatibility.
+  isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
+  mod
+));
+var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+var dotenv_exports = {};
+__export(dotenv_exports, {
+  loadDotenv: () => loadDotenv,
+  loadDotenvFiles: () => loadDotenvFiles,
+  parseDotenv: () => parseDotenv
+});
+module.exports = __toCommonJS(dotenv_exports);
+var import_promises = __toESM(require("node:fs/promises"), 1);
+var import_node_path = __toESM(require("node:path"), 1);
+function parseDotenv(content) {
+  const result = {};
+  const lines = content.split(/\r?\n/);
+  for (let line of lines) {
+    line = line.trim();
+    if (!line || line.startsWith("#")) continue;
+    const match = line.match(/^([^=]+?)\s*=\s*(.*)$/);
+    if (!match) continue;
+    const key = match[1].trim();
+    let value = match[2].trim();
+    if (value.startsWith('"') && value.endsWith('"') || value.startsWith("'") && value.endsWith("'")) {
+      value = value.slice(1, -1);
+      if (value.startsWith('"')) {
+        value = value.replace(/\\n/g, "\n").replace(/\\r/g, "\r").replace(/\\t/g, "\t").replace(/\\"/g, '"').replace(/\\\\/g, "\\");
+      }
+    } else {
+      const commentIdx = value.indexOf(" #");
+      if (commentIdx !== -1) {
+        value = value.slice(0, commentIdx).trim();
+      }
+    }
+    result[key] = value;
+  }
+  return result;
+}
+async function loadDotenv(filePath) {
+  const content = await import_promises.default.readFile(import_node_path.default.resolve(filePath), "utf8");
+  return parseDotenv(content);
+}
+async function loadDotenvFiles(filePaths) {
+  const result = {};
+  for (const filePath of filePaths) {
+    try {
+      const vars = await loadDotenv(filePath);
+      Object.assign(result, vars);
+    } catch (err) {
+      if (err.code !== "ENOENT") throw err;
+    }
+  }
+  return result;
+}
+// Annotate the CommonJS export names for ESM import in node:
+0 && (module.exports = {
+  loadDotenv,
+  loadDotenvFiles,
+  parseDotenv
+});
+//# sourceMappingURL=dotenv.js.map
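
The new `parseDotenv` above trims each line, skips `#` comments, strips surrounding quotes, and removes inline ` #` comments only from unquoted values. A quick sketch of that behavior (sample `.env` content invented for illustration):

```js
const { parseDotenv } = require("@ant.sh/colony");

// Invented sample input; each line exercises one branch of parseDotenv.
const parsed = parseDotenv([
  "# comment lines and blank lines are skipped",
  "HOST=localhost # inline comments are stripped from unquoted values",
  'NAME="colony demo"',         // surrounding quotes are removed
  "TOKEN='abc #not-a-comment'", // quoted values keep a " #" sequence intact
].join("\n"));

console.log(parsed);
// { HOST: "localhost", NAME: "colony demo", TOKEN: "abc #not-a-comment" }
```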

package/dist/cjs/dotenv.js.map
ADDED

@@ -0,0 +1,7 @@
+{
+  "version": 3,
+  "sources": ["../../src/dotenv.js"],
+
"sourcesContent": ["import fs from \"node:fs/promises\";\nimport path from \"node:path\";\n\n/**\n * Parse a dotenv file and return key-value pairs\n * @param {string} content - The content of the .env file\n * @returns {Record<string, string>}\n */\nexport function parseDotenv(content) {\n const result = {};\n const lines = content.split(/\\r?\\n/);\n\n for (let line of lines) {\n // Remove comments (lines starting with # or lines with # after value)\n line = line.trim();\n if (!line || line.startsWith(\"#\")) continue;\n\n // Match KEY=VALUE or KEY=\"VALUE\" or KEY='VALUE'\n const match = line.match(/^([^=]+?)\\s*=\\s*(.*)$/);\n if (!match) continue;\n\n const key = match[1].trim();\n let value = match[2].trim();\n\n // Handle quoted values\n if ((value.startsWith('\"') && value.endsWith('\"')) ||\n (value.startsWith(\"'\") && value.endsWith(\"'\"))) {\n value = value.slice(1, -1);\n // Unescape common escape sequences for double-quoted strings\n if (value.startsWith('\"')) {\n value = value\n .replace(/\\\\n/g, \"\\n\")\n .replace(/\\\\r/g, \"\\r\")\n .replace(/\\\\t/g, \"\\t\")\n .replace(/\\\\\"/g, '\"')\n .replace(/\\\\\\\\/g, \"\\\\\");\n }\n } else {\n // Remove inline comments for unquoted values\n const commentIdx = value.indexOf(\" #\");\n if (commentIdx !== -1) {\n value = value.slice(0, commentIdx).trim();\n }\n }\n\n result[key] = value;\n }\n\n return result;\n}\n\n/**\n * Load a dotenv file from disk\n * @param {string} filePath - Path to the .env file\n * @returns {Promise<Record<string, string>>}\n */\nexport async function loadDotenv(filePath) {\n const content = await fs.readFile(path.resolve(filePath), \"utf8\");\n return parseDotenv(content);\n}\n\n/**\n * Load multiple dotenv files, with later files overriding earlier ones\n * @param {string[]} filePaths - Paths to .env files\n * @returns {Promise<Record<string, string>>}\n */\nexport async function loadDotenvFiles(filePaths) {\n const result = {};\n\n for (const filePath of filePaths) {\n try {\n const vars = await loadDotenv(filePath);\n Object.assign(result, vars);\n } catch (err) {\n // File doesn't exist - silently skip\n if (err.code !== \"ENOENT\") throw err;\n }\n }\n\n return result;\n}\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,sBAAe;AACf,uBAAiB;AAOV,SAAS,YAAY,SAAS;AACnC,QAAM,SAAS,CAAC;AAChB,QAAM,QAAQ,QAAQ,MAAM,OAAO;AAEnC,WAAS,QAAQ,OAAO;AAEtB,WAAO,KAAK,KAAK;AACjB,QAAI,CAAC,QAAQ,KAAK,WAAW,GAAG,EAAG;AAGnC,UAAM,QAAQ,KAAK,MAAM,uBAAuB;AAChD,QAAI,CAAC,MAAO;AAEZ,UAAM,MAAM,MAAM,CAAC,EAAE,KAAK;AAC1B,QAAI,QAAQ,MAAM,CAAC,EAAE,KAAK;AAG1B,QAAK,MAAM,WAAW,GAAG,KAAK,MAAM,SAAS,GAAG,KAC3C,MAAM,WAAW,GAAG,KAAK,MAAM,SAAS,GAAG,GAAI;AAClD,cAAQ,MAAM,MAAM,GAAG,EAAE;AAEzB,UAAI,MAAM,WAAW,GAAG,GAAG;AACzB,gBAAQ,MACL,QAAQ,QAAQ,IAAI,EACpB,QAAQ,QAAQ,IAAI,EACpB,QAAQ,QAAQ,GAAI,EACpB,QAAQ,QAAQ,GAAG,EACnB,QAAQ,SAAS,IAAI;AAAA,MAC1B;AAAA,IACF,OAAO;AAEL,YAAM,aAAa,MAAM,QAAQ,IAAI;AACrC,UAAI,eAAe,IAAI;AACrB,gBAAQ,MAAM,MAAM,GAAG,UAAU,EAAE,KAAK;AAAA,MAC1C;AAAA,IACF;AAEA,WAAO,GAAG,IAAI;AAAA,EAChB;AAEA,SAAO;AACT;AAOA,eAAsB,WAAW,UAAU;AACzC,QAAM,UAAU,MAAM,gBAAAA,QAAG,SAAS,iBAAAC,QAAK,QAAQ,QAAQ,GAAG,MAAM;AAChE,SAAO,YAAY,OAAO;AAC5B;AAOA,eAAsB,gBAAgB,WAAW;AAC/C,QAAM,SAAS,CAAC;AAEhB,aAAW,YAAY,WAAW;AAChC,QAAI;AACF,YAAM,OAAO,MAAM,WAAW,QAAQ;AACtC,aAAO,OAAO,QAAQ,IAAI;AAAA,IAC5B,SAAS,KAAK;AAEZ,UAAI,IAAI,SAAS,SAAU,OAAM;AAAA,IACnC;AAAA,EACF;AAEA,SAAO;AACT;",
+  "names": ["fs", "path"]
+}
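
`loadDotenvFiles` (its source is embedded in the map above) merges files left to right with `Object.assign`, so later files override earlier ones, and the `ENOENT` catch means missing files are skipped silently. A minimal sketch (the file names here are assumptions):

```js
const { loadDotenvFiles } = require("@ant.sh/colony");

(async () => {
  // Later paths win; a nonexistent path is skipped rather than throwing.
  const env = await loadDotenvFiles([".env", ".env.local", ".env.missing"]);
  // If .env sets PORT=3000 and .env.local sets PORT=4000, env.PORT is "4000".
  console.log(env.PORT);
})();
```
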
package/dist/cjs/index.js
CHANGED

@@ -35,6 +35,9 @@ __export(index_exports, {
   dryRunIncludes: () => dryRunIncludes,
   lintColony: () => lintColony,
   loadColony: () => loadColony,
+  loadDotenv: () => import_dotenv2.loadDotenv,
+  loadDotenvFiles: () => import_dotenv2.loadDotenvFiles,
+  parseDotenv: () => import_dotenv2.parseDotenv,
   registerSecretProvider: () => import_secrets.registerSecretProvider,
   unregisterSecretProvider: () => import_secrets.unregisterSecretProvider,
   validateColony: () => validateColony
@@ -46,9 +49,11 @@ var import_fast_glob = __toESM(require("fast-glob"), 1);
 var import_parser = require("./parser.js");
 var import_resolver = require("./resolver.js");
 var import_secrets = require("./secrets.js");
+var import_dotenv = require("./dotenv.js");
 var import_aws = require("./providers/aws.js");
 var import_vault = require("./providers/vault.js");
 var import_openbao = require("./providers/openbao.js");
+var import_dotenv2 = require("./dotenv.js");
 async function loadColony(opts) {
   const entry = opts?.entry;
   if (!entry) throw new Error("loadColony: opts.entry is required");
@@ -79,11 +84,25 @@ async function loadColony(opts) {
     ...opts.ctx
   };
   const vars = { ROOT: process.cwd(), ...opts.vars ?? {} };
+  let env = null;
+  if (opts.dotenv) {
+    let dotenvPaths;
+    if (opts.dotenv === true) {
+      dotenvPaths = [".env", ".env.local"];
+    } else if (typeof opts.dotenv === "string") {
+      dotenvPaths = [opts.dotenv];
+    } else if (Array.isArray(opts.dotenv)) {
+      dotenvPaths = opts.dotenv;
+    }
+    if (dotenvPaths) {
+      env = await (0, import_dotenv.loadDotenvFiles)(dotenvPaths);
+    }
+  }
   const requires = parsed.flatMap((p) => p.requires ?? []);
   const allRules = parsed.flatMap((p) => p.rules);
   const allowedEnvVars = sandbox.allowedEnvVars ?? null;
   const allowedVars = sandbox.allowedVars ?? null;
-  let cfg = (0, import_resolver.resolveRules)({ rules: allRules, dims, ctx, vars, allowedEnvVars, allowedVars, warnings });
+  let cfg = (0, import_resolver.resolveRules)({ rules: allRules, dims, ctx, vars, env, allowedEnvVars, allowedVars, warnings });
   const secretsOpts = opts.secrets ?? {};
   if (secretsOpts.providers?.length || (0, import_secrets.hasGlobalProviders)()) {
     const cacheOpts = secretsOpts.cache ?? {};
@@ -376,6 +395,9 @@ async function lintColony(opts) {
   dryRunIncludes,
   lintColony,
   loadColony,
+  loadDotenv,
+  loadDotenvFiles,
+  parseDotenv,
   registerSecretProvider,
   unregisterSecretProvider,
   validateColony
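
Per the new branch in `loadColony`, `opts.dotenv` accepts `true` (shorthand for `[".env", ".env.local"]`), a single path, or an array of paths; the merged variables are handed to the resolver as `env`. A usage sketch (the entry path matches the README example; the alternate file names are hypothetical):

```js
const { loadColony } = require("@ant.sh/colony");

(async () => {
  // true loads .env then .env.local, with .env.local taking precedence
  const cfg = await loadColony({ entry: "config/app.colony", dotenv: true });

  // Also accepted: dotenv: ".env.production" or dotenv: [".env", ".env.production"]
  console.log(cfg.get("database.host"));
})();
```
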
package/dist/cjs/index.js.map
CHANGED

@@ -1,7 +1,7 @@
 {
   "version": 3,
   "sources": ["../../src/index.js"],
-
"sourcesContent": ["import fs from \"node:fs/promises\";\nimport path from \"node:path\";\nimport fg from \"fast-glob\";\nimport { parseColony } from \"./parser.js\";\nimport { resolveRules } from \"./resolver.js\";\nimport {\n applySecretsDeep,\n SecretCache,\n hasGlobalProviders,\n registerSecretProvider,\n unregisterSecretProvider,\n clearSecretProviders,\n} from \"./secrets.js\";\n\n// Re-export secrets functions\nexport { registerSecretProvider, unregisterSecretProvider, clearSecretProviders };\n\n// Re-export providers\nexport { AwsSecretsProvider } from \"./providers/aws.js\";\nexport { VaultProvider } from \"./providers/vault.js\";\nexport { OpenBaoProvider } from \"./providers/openbao.js\";\n\n/**\n * @param {object} opts\n * @param {string} opts.entry\n * @param {string[]=} opts.dims\n * @param {Record<string,string>=} opts.ctx\n * @param {Record<string,string>=} opts.vars\n * @param {(cfg: any) => any=} opts.schema // optional validation hook (e.g. zod.parse)\n * @param {object=} opts.sandbox // security options\n * @param {string=} opts.sandbox.basePath // restrict includes to this directory\n * @param {string[]=} opts.sandbox.allowedEnvVars // whitelist of allowed env vars (null = allow all)\n * @param {number=} opts.sandbox.maxIncludeDepth // max depth for includes (default 50)\n * @param {boolean=} opts.warnOnSkippedIncludes // warn when skipping already-visited includes\n * @param {object=} opts.secrets // secrets provider options\n * @param {Array=} opts.secrets.providers // secret providers (e.g. AwsSecretsProvider)\n * @param {string[]=} opts.secrets.allowedSecrets // whitelist of allowed secret patterns\n * @param {object=} opts.secrets.cache // cache options\n * @param {string=} opts.secrets.onNotFound // 'empty' | 'warn' | 'error' (default: 'warn')\n * @returns {Promise<object>}\n */\nexport async function loadColony(opts) {\n const entry = opts?.entry;\n if (!entry) throw new Error(\"loadColony: opts.entry is required\");\n\n const sandbox = opts.sandbox ?? {};\n const basePath = sandbox.basePath ? path.resolve(sandbox.basePath) : null;\n const maxIncludeDepth = sandbox.maxIncludeDepth ?? 50;\n const maxFileSize = sandbox.maxFileSize ?? null;\n const warnOnSkippedIncludes = opts.warnOnSkippedIncludes ?? false;\n\n const visited = new Set();\n const warnings = [];\n const files = await expandIncludes(entry, visited, {\n basePath,\n maxIncludeDepth,\n maxFileSize,\n warnOnSkippedIncludes,\n warnings,\n });\n\n const parsed = [];\n for (const file of files) {\n const text = await fs.readFile(file, \"utf8\");\n parsed.push(parseColony(text, { filePath: file }));\n }\n\n const dims =\n (Array.isArray(opts.dims) && opts.dims.length ? opts.dims : null) ??\n parsed.find((p) => p.dims?.length)?.dims ??\n [\"env\"];\n\n // ctx precedence: opts.ctx overrides, else @envDefaults, else sensible defaults\n const envDefaults = mergeEnvDefaults(parsed.map((p) => p.envDefaults ?? {}));\n const ctx = {\n ...envDefaults,\n env: process.env.NODE_ENV ?? \"dev\",\n ...opts.ctx,\n };\n\n const vars = { ROOT: process.cwd(), ...(opts.vars ?? {}) };\n\n // Collect requires from all parsed files\n const requires = parsed.flatMap((p) => p.requires ?? []);\n\n const allRules = parsed.flatMap((p) => p.rules);\n\n const allowedEnvVars = sandbox.allowedEnvVars ?? null;\n const allowedVars = sandbox.allowedVars ?? 
null;\n let cfg = resolveRules({ rules: allRules, dims, ctx, vars, allowedEnvVars, allowedVars, warnings });\n\n // Apply secrets if providers are configured\n const secretsOpts = opts.secrets ?? {};\n if (secretsOpts.providers?.length || hasGlobalProviders()) {\n const cacheOpts = secretsOpts.cache ?? {};\n const cache = cacheOpts.enabled !== false\n ? new SecretCache(cacheOpts.maxSize ?? 100)\n : null;\n\n const secretified = await applySecretsDeep(cfg, {\n providers: secretsOpts.providers ?? [],\n allowedSecrets: secretsOpts.allowedSecrets ?? null,\n cache,\n cacheTtl: cacheOpts.ttl ?? 300000,\n onNotFound: secretsOpts.onNotFound ?? \"warn\",\n warnings,\n });\n\n // Copy config methods to new object\n copyConfigMethods(secretified, cfg, warnings);\n cfg = secretified;\n }\n\n // Enforce @require after resolution\n const missing = [];\n for (const reqKey of requires) {\n if (cfg.get(reqKey) === undefined) missing.push(reqKey);\n }\n if (missing.length) {\n throw new Error(\n `COLONY @require failed (missing keys):\\n` +\n missing.map((k) => ` - ${k}`).join(\"\\n\")\n );\n }\n\n // Attach warnings as non-enumerable\n Object.defineProperty(cfg, \"_warnings\", { enumerable: false, value: warnings });\n\n // Optional schema validation hook (supports both sync and async)\n if (typeof opts.schema === \"function\") {\n const result = opts.schema(cfg);\n\n // Handle async schema validators (e.g., async Zod, Joi)\n if (result && typeof result.then === \"function\") {\n const validated = await result;\n if (validated && validated !== cfg) {\n copyConfigMethods(validated, cfg, warnings);\n return validated;\n }\n } else if (result && result !== cfg) {\n copyConfigMethods(result, cfg, warnings);\n return result;\n }\n }\n\n return cfg;\n}\n\n/**\n * Copy non-enumerable config methods to validated object\n */\nfunction copyConfigMethods(target, source, warnings) {\n Object.defineProperties(target, {\n get: { enumerable: false, value: source.get },\n explain: { enumerable: false, value: source.explain },\n toJSON: { enumerable: false, value: source.toJSON },\n keys: { enumerable: false, value: source.keys },\n diff: { enumerable: false, value: source.diff },\n _trace: { enumerable: false, value: source._trace },\n _warnings: { enumerable: false, value: warnings },\n });\n}\n\nfunction mergeEnvDefaults(list) {\n const out = {};\n for (const m of list) {\n for (const [k, v] of Object.entries(m)) out[k] = v;\n }\n return out;\n}\n\nasync function expandIncludes(entry, visited, { basePath, maxIncludeDepth, maxFileSize, warnOnSkippedIncludes, warnings }) {\n const absEntry = path.resolve(entry);\n const out = [];\n await dfs(absEntry, 0);\n return out;\n\n async function dfs(file, depth) {\n if (depth > maxIncludeDepth) {\n throw new Error(`COLONY: Max include depth (${maxIncludeDepth}) exceeded at: ${file}`);\n }\n\n const abs = path.resolve(file);\n\n if (visited.has(abs)) {\n if (warnOnSkippedIncludes) {\n warnings.push({ type: \"skipped_include\", file: abs, message: `Skipping already-visited include: ${abs}` });\n }\n return;\n }\n visited.add(abs);\n\n // Check file size if limit is set\n if (maxFileSize !== null) {\n const stat = await fs.stat(abs);\n if (stat.size > maxFileSize) {\n throw new Error(`COLONY: File size (${stat.size} bytes) exceeds maxFileSize (${maxFileSize} bytes): ${abs}`);\n }\n }\n\n const text = await fs.readFile(abs, \"utf8\");\n const { includes } = parseColony(text, { filePath: abs, parseOnlyDirectives: true });\n\n for (const inc of includes) {\n const incAbs = 
path.resolve(path.dirname(abs), inc);\n\n // Security: validate path is within basePath if set\n if (basePath !== null) {\n const normalizedInc = path.normalize(incAbs);\n if (!normalizedInc.startsWith(basePath + path.sep) && normalizedInc !== basePath) {\n throw new Error(\n `COLONY: Path traversal blocked. Include \"${inc}\" resolves to \"${normalizedInc}\" which is outside basePath \"${basePath}\"`\n );\n }\n }\n\n const matches = await fg(incAbs.replace(/\\\\/g, \"/\"), { dot: true });\n // Sort alphabetically for deterministic ordering across platforms/filesystems\n for (const m of matches.sort((a, b) => a.localeCompare(b))) {\n // Also validate glob matches against basePath\n if (basePath !== null) {\n const normalizedMatch = path.normalize(m);\n if (!normalizedMatch.startsWith(basePath + path.sep) && normalizedMatch !== basePath) {\n throw new Error(\n `COLONY: Path traversal blocked. Glob match \"${m}\" is outside basePath \"${basePath}\"`\n );\n }\n }\n await dfs(m, depth + 1);\n }\n }\n\n out.push(abs);\n }\n}\n\n/**\n * Validate syntax of colony files without resolving\n * @param {string} entry - Entry file path\n * @returns {Promise<{valid: boolean, files: string[], errors: Array<{file: string, error: string}>}>}\n */\nexport async function validateColony(entry) {\n const visited = new Set();\n const files = [];\n const errors = [];\n\n await validateDfs(path.resolve(entry));\n\n return {\n valid: errors.length === 0,\n files,\n errors,\n };\n\n async function validateDfs(file) {\n const abs = path.resolve(file);\n if (visited.has(abs)) return;\n visited.add(abs);\n\n try {\n const text = await fs.readFile(abs, \"utf8\");\n const { includes } = parseColony(text, { filePath: abs });\n files.push(abs);\n\n for (const inc of includes) {\n const incAbs = path.resolve(path.dirname(abs), inc);\n const matches = await fg(incAbs.replace(/\\\\/g, \"/\"), { dot: true });\n for (const m of matches.sort((a, b) => a.localeCompare(b))) {\n await validateDfs(m);\n }\n }\n } catch (e) {\n errors.push({ file: abs, error: e.message });\n }\n }\n}\n\n/**\n * Dry-run: list all files that would be included\n * @param {string} entry - Entry file path\n * @returns {Promise<string[]>}\n */\nexport async function dryRunIncludes(entry) {\n const visited = new Set();\n const files = [];\n await dryRunDfs(path.resolve(entry));\n return files;\n\n async function dryRunDfs(file) {\n const abs = path.resolve(file);\n if (visited.has(abs)) return;\n visited.add(abs);\n\n const text = await fs.readFile(abs, \"utf8\");\n const { includes } = parseColony(text, { filePath: abs, parseOnlyDirectives: true });\n\n for (const inc of includes) {\n const incAbs = path.resolve(path.dirname(abs), inc);\n const matches = await fg(incAbs.replace(/\\\\/g, \"/\"), { dot: true });\n for (const m of matches.sort((a, b) => a.localeCompare(b))) {\n await dryRunDfs(m);\n }\n }\n\n files.push(abs);\n }\n}\n\n/**\n * Compare two configs loaded with different contexts\n * @param {object} opts - Same options as loadColony, but with ctx1 and ctx2\n * @param {Record<string,string>} opts.ctx1 - First context\n * @param {Record<string,string>} opts.ctx2 - Second context\n * @returns {Promise<{cfg1: object, cfg2: object, diff: object}>}\n */\nexport async function diffColony(opts) {\n const { ctx1, ctx2, ...baseOpts } = opts;\n\n if (!ctx1 || !ctx2) {\n throw new Error(\"diffColony: both ctx1 and ctx2 are required\");\n }\n\n const cfg1 = await loadColony({ ...baseOpts, ctx: ctx1 });\n const cfg2 = await loadColony({ ...baseOpts, ctx: 
ctx2 });\n\n return {\n cfg1,\n cfg2,\n diff: cfg1.diff(cfg2),\n };\n}\n\n/**\n * Lint colony files for potential issues\n * @param {object} opts\n * @param {string} opts.entry - Entry file path\n * @param {string[]=} opts.dims - Dimension names\n * @returns {Promise<{issues: Array<{type: string, severity: string, message: string, file?: string, line?: number}>}>}\n */\nexport async function lintColony(opts) {\n const entry = opts?.entry;\n if (!entry) throw new Error(\"lintColony: opts.entry is required\");\n\n const issues = [];\n const visited = new Set();\n const allRules = [];\n const allFiles = [];\n let foundDims = null;\n\n // Collect all rules from all files\n await collectRules(path.resolve(entry));\n\n async function collectRules(file) {\n const abs = path.resolve(file);\n if (visited.has(abs)) return;\n visited.add(abs);\n\n try {\n const text = await fs.readFile(abs, \"utf8\");\n const parsed = parseColony(text, { filePath: abs });\n allFiles.push(abs);\n\n // Capture dims from first file that has them\n if (!foundDims && parsed.dims?.length) {\n foundDims = parsed.dims;\n }\n\n for (const rule of parsed.rules) {\n allRules.push({ ...rule, filePath: abs });\n }\n\n for (const inc of parsed.includes) {\n const incAbs = path.resolve(path.dirname(abs), inc);\n const matches = await fg(incAbs.replace(/\\\\/g, \"/\"), { dot: true });\n for (const m of matches.sort((a, b) => a.localeCompare(b))) {\n await collectRules(m);\n }\n }\n } catch (e) {\n issues.push({\n type: \"parse_error\",\n severity: \"error\",\n message: e.message,\n file: abs,\n });\n }\n }\n\n // Get dims from options, or from parsed files, or default\n const dims = opts.dims ?? foundDims ?? [\"env\"];\n\n // Check for shadowed rules (same key, same scope, different values)\n const rulesByKey = new Map();\n for (const rule of allRules) {\n const scope = rule.keySegments.slice(0, dims.length).join(\".\");\n const keyPath = rule.keySegments.slice(dims.length).join(\".\");\n const key = `${scope}|${keyPath}`;\n\n if (!rulesByKey.has(key)) {\n rulesByKey.set(key, []);\n }\n rulesByKey.get(key).push(rule);\n }\n\n for (const [key, rules] of rulesByKey.entries()) {\n if (rules.length > 1) {\n // Check if they're in different files or same file\n const locations = rules.map((r) => `${r.filePath}:${r.line}`);\n const uniqueLocations = new Set(locations);\n\n if (uniqueLocations.size > 1) {\n const [scope, keyPath] = key.split(\"|\");\n issues.push({\n type: \"shadowed_rule\",\n severity: \"warning\",\n message: `Rule \"${scope}.${keyPath}\" is defined ${rules.length} times. 
Later rule wins.`,\n file: rules[rules.length - 1].filePath,\n line: rules[rules.length - 1].line,\n });\n }\n }\n }\n\n // Check for potentially unused wildcard rules\n // (rules with all wildcards that might be overridden by more specific rules)\n for (const rule of allRules) {\n const scope = rule.keySegments.slice(0, dims.length);\n const keyPath = rule.keySegments.slice(dims.length).join(\".\");\n\n if (scope.every((s) => s === \"*\")) {\n // Check if there are more specific rules for the same key\n const moreSpecific = allRules.filter((r) => {\n const rKeyPath = r.keySegments.slice(dims.length).join(\".\");\n if (rKeyPath !== keyPath) return false;\n const rScope = r.keySegments.slice(0, dims.length);\n return rScope.some((s) => s !== \"*\") && r !== rule;\n });\n\n if (moreSpecific.length > 0) {\n issues.push({\n type: \"overridden_wildcard\",\n severity: \"info\",\n message: `Wildcard rule for \"${keyPath}\" is overridden by ${moreSpecific.length} more specific rule(s)`,\n file: rule.filePath,\n line: rule.line,\n });\n }\n }\n }\n\n // Check for empty includes\n for (const file of allFiles) {\n try {\n const text = await fs.readFile(file, \"utf8\");\n const parsed = parseColony(text, { filePath: file });\n\n for (const inc of parsed.includes) {\n const incAbs = path.resolve(path.dirname(file), inc);\n const matches = await fg(incAbs.replace(/\\\\/g, \"/\"), { dot: true });\n if (matches.length === 0) {\n issues.push({\n type: \"empty_include\",\n severity: \"warning\",\n message: `Include pattern \"${inc}\" matches no files`,\n file,\n });\n }\n }\n } catch {}\n }\n\n return { issues };\n}\n"],
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,sBAAe;AACf,uBAAiB;AACjB,uBAAe;AACf,oBAA4B;AAC5B,sBAA6B;AAC7B,qBAOO;
-  "names": ["path", "fs", "fg"]
+
"sourcesContent": ["import fs from \"node:fs/promises\";\nimport path from \"node:path\";\nimport fg from \"fast-glob\";\nimport { parseColony } from \"./parser.js\";\nimport { resolveRules } from \"./resolver.js\";\nimport {\n applySecretsDeep,\n SecretCache,\n hasGlobalProviders,\n registerSecretProvider,\n unregisterSecretProvider,\n clearSecretProviders,\n} from \"./secrets.js\";\nimport { loadDotenvFiles, parseDotenv } from \"./dotenv.js\";\n\n// Re-export secrets functions\nexport { registerSecretProvider, unregisterSecretProvider, clearSecretProviders };\n\n// Re-export providers\nexport { AwsSecretsProvider } from \"./providers/aws.js\";\nexport { VaultProvider } from \"./providers/vault.js\";\nexport { OpenBaoProvider } from \"./providers/openbao.js\";\n\n// Re-export dotenv functions\nexport { parseDotenv, loadDotenv, loadDotenvFiles } from \"./dotenv.js\";\n\n/**\n * @param {object} opts\n * @param {string} opts.entry\n * @param {string[]=} opts.dims\n * @param {Record<string,string>=} opts.ctx\n * @param {Record<string,string>=} opts.vars\n * @param {(cfg: any) => any=} opts.schema // optional validation hook (e.g. zod.parse)\n * @param {string|string[]|boolean=} opts.dotenv // dotenv file path(s), or true for ['.env', '.env.local']\n * @param {object=} opts.sandbox // security options\n * @param {string=} opts.sandbox.basePath // restrict includes to this directory\n * @param {string[]=} opts.sandbox.allowedEnvVars // whitelist of allowed env vars (null = allow all)\n * @param {number=} opts.sandbox.maxIncludeDepth // max depth for includes (default 50)\n * @param {boolean=} opts.warnOnSkippedIncludes // warn when skipping already-visited includes\n * @param {object=} opts.secrets // secrets provider options\n * @param {Array=} opts.secrets.providers // secret providers (e.g. AwsSecretsProvider)\n * @param {string[]=} opts.secrets.allowedSecrets // whitelist of allowed secret patterns\n * @param {object=} opts.secrets.cache // cache options\n * @param {string=} opts.secrets.onNotFound // 'empty' | 'warn' | 'error' (default: 'warn')\n * @returns {Promise<object>}\n */\nexport async function loadColony(opts) {\n const entry = opts?.entry;\n if (!entry) throw new Error(\"loadColony: opts.entry is required\");\n\n const sandbox = opts.sandbox ?? {};\n const basePath = sandbox.basePath ? path.resolve(sandbox.basePath) : null;\n const maxIncludeDepth = sandbox.maxIncludeDepth ?? 50;\n const maxFileSize = sandbox.maxFileSize ?? null;\n const warnOnSkippedIncludes = opts.warnOnSkippedIncludes ?? false;\n\n const visited = new Set();\n const warnings = [];\n const files = await expandIncludes(entry, visited, {\n basePath,\n maxIncludeDepth,\n maxFileSize,\n warnOnSkippedIncludes,\n warnings,\n });\n\n const parsed = [];\n for (const file of files) {\n const text = await fs.readFile(file, \"utf8\");\n parsed.push(parseColony(text, { filePath: file }));\n }\n\n const dims =\n (Array.isArray(opts.dims) && opts.dims.length ? opts.dims : null) ??\n parsed.find((p) => p.dims?.length)?.dims ??\n [\"env\"];\n\n // ctx precedence: opts.ctx overrides, else @envDefaults, else sensible defaults\n const envDefaults = mergeEnvDefaults(parsed.map((p) => p.envDefaults ?? {}));\n const ctx = {\n ...envDefaults,\n env: process.env.NODE_ENV ?? \"dev\",\n ...opts.ctx,\n };\n\n const vars = { ROOT: process.cwd(), ...(opts.vars ?? 
{}) };\n\n // Load dotenv files if configured\n let env = null;\n if (opts.dotenv) {\n let dotenvPaths;\n if (opts.dotenv === true) {\n dotenvPaths = [\".env\", \".env.local\"];\n } else if (typeof opts.dotenv === \"string\") {\n dotenvPaths = [opts.dotenv];\n } else if (Array.isArray(opts.dotenv)) {\n dotenvPaths = opts.dotenv;\n }\n if (dotenvPaths) {\n env = await loadDotenvFiles(dotenvPaths);\n }\n }\n\n // Collect requires from all parsed files\n const requires = parsed.flatMap((p) => p.requires ?? []);\n\n const allRules = parsed.flatMap((p) => p.rules);\n\n const allowedEnvVars = sandbox.allowedEnvVars ?? null;\n const allowedVars = sandbox.allowedVars ?? null;\n let cfg = resolveRules({ rules: allRules, dims, ctx, vars, env, allowedEnvVars, allowedVars, warnings });\n\n // Apply secrets if providers are configured\n const secretsOpts = opts.secrets ?? {};\n if (secretsOpts.providers?.length || hasGlobalProviders()) {\n const cacheOpts = secretsOpts.cache ?? {};\n const cache = cacheOpts.enabled !== false\n ? new SecretCache(cacheOpts.maxSize ?? 100)\n : null;\n\n const secretified = await applySecretsDeep(cfg, {\n providers: secretsOpts.providers ?? [],\n allowedSecrets: secretsOpts.allowedSecrets ?? null,\n cache,\n cacheTtl: cacheOpts.ttl ?? 300000,\n onNotFound: secretsOpts.onNotFound ?? \"warn\",\n warnings,\n });\n\n // Copy config methods to new object\n copyConfigMethods(secretified, cfg, warnings);\n cfg = secretified;\n }\n\n // Enforce @require after resolution\n const missing = [];\n for (const reqKey of requires) {\n if (cfg.get(reqKey) === undefined) missing.push(reqKey);\n }\n if (missing.length) {\n throw new Error(\n `COLONY @require failed (missing keys):\\n` +\n missing.map((k) => ` - ${k}`).join(\"\\n\")\n );\n }\n\n // Attach warnings as non-enumerable\n Object.defineProperty(cfg, \"_warnings\", { enumerable: false, value: warnings });\n\n // Optional schema validation hook (supports both sync and async)\n if (typeof opts.schema === \"function\") {\n const result = opts.schema(cfg);\n\n // Handle async schema validators (e.g., async Zod, Joi)\n if (result && typeof result.then === \"function\") {\n const validated = await result;\n if (validated && validated !== cfg) {\n copyConfigMethods(validated, cfg, warnings);\n return validated;\n }\n } else if (result && result !== cfg) {\n copyConfigMethods(result, cfg, warnings);\n return result;\n }\n }\n\n return cfg;\n}\n\n/**\n * Copy non-enumerable config methods to validated object\n */\nfunction copyConfigMethods(target, source, warnings) {\n Object.defineProperties(target, {\n get: { enumerable: false, value: source.get },\n explain: { enumerable: false, value: source.explain },\n toJSON: { enumerable: false, value: source.toJSON },\n keys: { enumerable: false, value: source.keys },\n diff: { enumerable: false, value: source.diff },\n _trace: { enumerable: false, value: source._trace },\n _warnings: { enumerable: false, value: warnings },\n });\n}\n\nfunction mergeEnvDefaults(list) {\n const out = {};\n for (const m of list) {\n for (const [k, v] of Object.entries(m)) out[k] = v;\n }\n return out;\n}\n\nasync function expandIncludes(entry, visited, { basePath, maxIncludeDepth, maxFileSize, warnOnSkippedIncludes, warnings }) {\n const absEntry = path.resolve(entry);\n const out = [];\n await dfs(absEntry, 0);\n return out;\n\n async function dfs(file, depth) {\n if (depth > maxIncludeDepth) {\n throw new Error(`COLONY: Max include depth (${maxIncludeDepth}) exceeded at: ${file}`);\n }\n\n const abs = 
path.resolve(file);\n\n if (visited.has(abs)) {\n if (warnOnSkippedIncludes) {\n warnings.push({ type: \"skipped_include\", file: abs, message: `Skipping already-visited include: ${abs}` });\n }\n return;\n }\n visited.add(abs);\n\n // Check file size if limit is set\n if (maxFileSize !== null) {\n const stat = await fs.stat(abs);\n if (stat.size > maxFileSize) {\n throw new Error(`COLONY: File size (${stat.size} bytes) exceeds maxFileSize (${maxFileSize} bytes): ${abs}`);\n }\n }\n\n const text = await fs.readFile(abs, \"utf8\");\n const { includes } = parseColony(text, { filePath: abs, parseOnlyDirectives: true });\n\n for (const inc of includes) {\n const incAbs = path.resolve(path.dirname(abs), inc);\n\n // Security: validate path is within basePath if set\n if (basePath !== null) {\n const normalizedInc = path.normalize(incAbs);\n if (!normalizedInc.startsWith(basePath + path.sep) && normalizedInc !== basePath) {\n throw new Error(\n `COLONY: Path traversal blocked. Include \"${inc}\" resolves to \"${normalizedInc}\" which is outside basePath \"${basePath}\"`\n );\n }\n }\n\n const matches = await fg(incAbs.replace(/\\\\/g, \"/\"), { dot: true });\n // Sort alphabetically for deterministic ordering across platforms/filesystems\n for (const m of matches.sort((a, b) => a.localeCompare(b))) {\n // Also validate glob matches against basePath\n if (basePath !== null) {\n const normalizedMatch = path.normalize(m);\n if (!normalizedMatch.startsWith(basePath + path.sep) && normalizedMatch !== basePath) {\n throw new Error(\n `COLONY: Path traversal blocked. Glob match \"${m}\" is outside basePath \"${basePath}\"`\n );\n }\n }\n await dfs(m, depth + 1);\n }\n }\n\n out.push(abs);\n }\n}\n\n/**\n * Validate syntax of colony files without resolving\n * @param {string} entry - Entry file path\n * @returns {Promise<{valid: boolean, files: string[], errors: Array<{file: string, error: string}>}>}\n */\nexport async function validateColony(entry) {\n const visited = new Set();\n const files = [];\n const errors = [];\n\n await validateDfs(path.resolve(entry));\n\n return {\n valid: errors.length === 0,\n files,\n errors,\n };\n\n async function validateDfs(file) {\n const abs = path.resolve(file);\n if (visited.has(abs)) return;\n visited.add(abs);\n\n try {\n const text = await fs.readFile(abs, \"utf8\");\n const { includes } = parseColony(text, { filePath: abs });\n files.push(abs);\n\n for (const inc of includes) {\n const incAbs = path.resolve(path.dirname(abs), inc);\n const matches = await fg(incAbs.replace(/\\\\/g, \"/\"), { dot: true });\n for (const m of matches.sort((a, b) => a.localeCompare(b))) {\n await validateDfs(m);\n }\n }\n } catch (e) {\n errors.push({ file: abs, error: e.message });\n }\n }\n}\n\n/**\n * Dry-run: list all files that would be included\n * @param {string} entry - Entry file path\n * @returns {Promise<string[]>}\n */\nexport async function dryRunIncludes(entry) {\n const visited = new Set();\n const files = [];\n await dryRunDfs(path.resolve(entry));\n return files;\n\n async function dryRunDfs(file) {\n const abs = path.resolve(file);\n if (visited.has(abs)) return;\n visited.add(abs);\n\n const text = await fs.readFile(abs, \"utf8\");\n const { includes } = parseColony(text, { filePath: abs, parseOnlyDirectives: true });\n\n for (const inc of includes) {\n const incAbs = path.resolve(path.dirname(abs), inc);\n const matches = await fg(incAbs.replace(/\\\\/g, \"/\"), { dot: true });\n for (const m of matches.sort((a, b) => a.localeCompare(b))) {\n await 
dryRunDfs(m);\n }\n }\n\n files.push(abs);\n }\n}\n\n/**\n * Compare two configs loaded with different contexts\n * @param {object} opts - Same options as loadColony, but with ctx1 and ctx2\n * @param {Record<string,string>} opts.ctx1 - First context\n * @param {Record<string,string>} opts.ctx2 - Second context\n * @returns {Promise<{cfg1: object, cfg2: object, diff: object}>}\n */\nexport async function diffColony(opts) {\n const { ctx1, ctx2, ...baseOpts } = opts;\n\n if (!ctx1 || !ctx2) {\n throw new Error(\"diffColony: both ctx1 and ctx2 are required\");\n }\n\n const cfg1 = await loadColony({ ...baseOpts, ctx: ctx1 });\n const cfg2 = await loadColony({ ...baseOpts, ctx: ctx2 });\n\n return {\n cfg1,\n cfg2,\n diff: cfg1.diff(cfg2),\n };\n}\n\n/**\n * Lint colony files for potential issues\n * @param {object} opts\n * @param {string} opts.entry - Entry file path\n * @param {string[]=} opts.dims - Dimension names\n * @returns {Promise<{issues: Array<{type: string, severity: string, message: string, file?: string, line?: number}>}>}\n */\nexport async function lintColony(opts) {\n const entry = opts?.entry;\n if (!entry) throw new Error(\"lintColony: opts.entry is required\");\n\n const issues = [];\n const visited = new Set();\n const allRules = [];\n const allFiles = [];\n let foundDims = null;\n\n // Collect all rules from all files\n await collectRules(path.resolve(entry));\n\n async function collectRules(file) {\n const abs = path.resolve(file);\n if (visited.has(abs)) return;\n visited.add(abs);\n\n try {\n const text = await fs.readFile(abs, \"utf8\");\n const parsed = parseColony(text, { filePath: abs });\n allFiles.push(abs);\n\n // Capture dims from first file that has them\n if (!foundDims && parsed.dims?.length) {\n foundDims = parsed.dims;\n }\n\n for (const rule of parsed.rules) {\n allRules.push({ ...rule, filePath: abs });\n }\n\n for (const inc of parsed.includes) {\n const incAbs = path.resolve(path.dirname(abs), inc);\n const matches = await fg(incAbs.replace(/\\\\/g, \"/\"), { dot: true });\n for (const m of matches.sort((a, b) => a.localeCompare(b))) {\n await collectRules(m);\n }\n }\n } catch (e) {\n issues.push({\n type: \"parse_error\",\n severity: \"error\",\n message: e.message,\n file: abs,\n });\n }\n }\n\n // Get dims from options, or from parsed files, or default\n const dims = opts.dims ?? foundDims ?? [\"env\"];\n\n // Check for shadowed rules (same key, same scope, different values)\n const rulesByKey = new Map();\n for (const rule of allRules) {\n const scope = rule.keySegments.slice(0, dims.length).join(\".\");\n const keyPath = rule.keySegments.slice(dims.length).join(\".\");\n const key = `${scope}|${keyPath}`;\n\n if (!rulesByKey.has(key)) {\n rulesByKey.set(key, []);\n }\n rulesByKey.get(key).push(rule);\n }\n\n for (const [key, rules] of rulesByKey.entries()) {\n if (rules.length > 1) {\n // Check if they're in different files or same file\n const locations = rules.map((r) => `${r.filePath}:${r.line}`);\n const uniqueLocations = new Set(locations);\n\n if (uniqueLocations.size > 1) {\n const [scope, keyPath] = key.split(\"|\");\n issues.push({\n type: \"shadowed_rule\",\n severity: \"warning\",\n message: `Rule \"${scope}.${keyPath}\" is defined ${rules.length} times. 
Later rule wins.`,\n file: rules[rules.length - 1].filePath,\n line: rules[rules.length - 1].line,\n });\n }\n }\n }\n\n // Check for potentially unused wildcard rules\n // (rules with all wildcards that might be overridden by more specific rules)\n for (const rule of allRules) {\n const scope = rule.keySegments.slice(0, dims.length);\n const keyPath = rule.keySegments.slice(dims.length).join(\".\");\n\n if (scope.every((s) => s === \"*\")) {\n // Check if there are more specific rules for the same key\n const moreSpecific = allRules.filter((r) => {\n const rKeyPath = r.keySegments.slice(dims.length).join(\".\");\n if (rKeyPath !== keyPath) return false;\n const rScope = r.keySegments.slice(0, dims.length);\n return rScope.some((s) => s !== \"*\") && r !== rule;\n });\n\n if (moreSpecific.length > 0) {\n issues.push({\n type: \"overridden_wildcard\",\n severity: \"info\",\n message: `Wildcard rule for \"${keyPath}\" is overridden by ${moreSpecific.length} more specific rule(s)`,\n file: rule.filePath,\n line: rule.line,\n });\n }\n }\n }\n\n // Check for empty includes\n for (const file of allFiles) {\n try {\n const text = await fs.readFile(file, \"utf8\");\n const parsed = parseColony(text, { filePath: file });\n\n for (const inc of parsed.includes) {\n const incAbs = path.resolve(path.dirname(file), inc);\n const matches = await fg(incAbs.replace(/\\\\/g, \"/\"), { dot: true });\n if (matches.length === 0) {\n issues.push({\n type: \"empty_include\",\n severity: \"warning\",\n message: `Include pattern \"${inc}\" matches no files`,\n file,\n });\n }\n }\n } catch {}\n }\n\n return { issues };\n}\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,sBAAe;AACf,uBAAiB;AACjB,uBAAe;AACf,oBAA4B;AAC5B,sBAA6B;AAC7B,qBAOO;AACP,oBAA6C;AAM7C,iBAAmC;AACnC,mBAA8B;AAC9B,qBAAgC;AAGhC,IAAAA,iBAAyD;AAsBzD,eAAsB,WAAW,MAAM;AACrC,QAAM,QAAQ,MAAM;AACpB,MAAI,CAAC,MAAO,OAAM,IAAI,MAAM,oCAAoC;AAEhE,QAAM,UAAU,KAAK,WAAW,CAAC;AACjC,QAAM,WAAW,QAAQ,WAAW,iBAAAC,QAAK,QAAQ,QAAQ,QAAQ,IAAI;AACrE,QAAM,kBAAkB,QAAQ,mBAAmB;AACnD,QAAM,cAAc,QAAQ,eAAe;AAC3C,QAAM,wBAAwB,KAAK,yBAAyB;AAE5D,QAAM,UAAU,oBAAI,IAAI;AACxB,QAAM,WAAW,CAAC;AAClB,QAAM,QAAQ,MAAM,eAAe,OAAO,SAAS;AAAA,IACjD;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAED,QAAM,SAAS,CAAC;AAChB,aAAW,QAAQ,OAAO;AACxB,UAAM,OAAO,MAAM,gBAAAC,QAAG,SAAS,MAAM,MAAM;AAC3C,WAAO,SAAK,2BAAY,MAAM,EAAE,UAAU,KAAK,CAAC,CAAC;AAAA,EACnD;AAEA,QAAM,QACH,MAAM,QAAQ,KAAK,IAAI,KAAK,KAAK,KAAK,SAAS,KAAK,OAAO,SAC5D,OAAO,KAAK,CAAC,MAAM,EAAE,MAAM,MAAM,GAAG,QACpC,CAAC,KAAK;AAGR,QAAM,cAAc,iBAAiB,OAAO,IAAI,CAAC,MAAM,EAAE,eAAe,CAAC,CAAC,CAAC;AAC3E,QAAM,MAAM;AAAA,IACV,GAAG;AAAA,IACH,KAAK,QAAQ,IAAI,YAAY;AAAA,IAC7B,GAAG,KAAK;AAAA,EACV;AAEA,QAAM,OAAO,EAAE,MAAM,QAAQ,IAAI,GAAG,GAAI,KAAK,QAAQ,CAAC,EAAG;AAGzD,MAAI,MAAM;AACV,MAAI,KAAK,QAAQ;AACf,QAAI;AACJ,QAAI,KAAK,WAAW,MAAM;AACxB,oBAAc,CAAC,QAAQ,YAAY;AAAA,IACrC,WAAW,OAAO,KAAK,WAAW,UAAU;AAC1C,oBAAc,CAAC,KAAK,MAAM;AAAA,IAC5B,WAAW,MAAM,QAAQ,KAAK,MAAM,GAAG;AACrC,oBAAc,KAAK;AAAA,IACrB;AACA,QAAI,aAAa;AACf,YAAM,UAAM,+BAAgB,WAAW;AAAA,IACzC;AAAA,EACF;AAGA,QAAM,WAAW,OAAO,QAAQ,CAAC,MAAM,EAAE,YAAY,CAAC,CAAC;AAEvD,QAAM,WAAW,OAAO,QAAQ,CAAC,MAAM,EAAE,KAAK;AAE9C,QAAM,iBAAiB,QAAQ,kBAAkB;AACjD,QAAM,cAAc,QAAQ,eAAe;AAC3C,MAAI,UAAM,8BAAa,EAAE,OAAO,UAAU,MAAM,KAAK,MAAM,KAAK,gBAAgB,aAAa,SAAS,CAAC;AAGvG,QAAM,cAAc,KAAK,WAAW,CAAC;AACrC,MAAI,YAAY,WAAW,cAAU,mCAAmB,GAAG;AACzD,UAAM,YAAY,YAAY,SAAS,CAAC;AACxC,UAAM,QAAQ,UAAU,YAAY,QAChC,IAAI,2BAAY,UAAU,WAAW,GAAG,IACxC;AAEJ,UAAM,cAAc,UAAM,iCAAiB,KAAK;AAAA,MAC9C,WAAW,YAAY,aAAa,CAAC;AAAA,MACrC,gBAAgB,YAAY,kBAAkB;AAAA,MAC9C;AAAA,MACA,UAAU,UAAU,OAAO;AAAA,MAC3B,YAAY,YAAY,cAAc;AAAA,MACtC;AAAA,IACF,CAAC;AAGD,sBAAkB,aAAa,KAAK,QAAQ;AAC5C,UAAM;AAAA,EACR;AAGA,QAAM,UAAU,CAAC;AACjB,aAAW,UAAU,UAAU;AAC7B,QAAI,IAAI,IAAI,MAAM,MAAM,OAAW,SAAQ,KAAK,MAAM;AAAA,EACxD;AACA,MAAI,QAAQ,QAAQ;AAClB,UAAM,IAAI;AAAA,MACR;AAAA,IACA,QAAQ,IAAI,CAAC,MAAM,OAAO,CAAC,EAAE,EAAE,KAAK,IAAI;AAAA,IAC1C;AAAA,EACF;AAGA,SAAO,eAAe,KAAK,aAAa,EAAE,YAAY,OAAO,OAAO,SAAS,CAAC;AAG9E,MAAI,OAAO,KAAK,WAAW,YAAY;AACrC,UAAM,SAAS,KAAK,OAAO,GAAG;AAG9B,QAAI,UAAU,OAAO,OAAO,SAAS,YAAY;AAC/C,YAAM,YAAY,MAAM;AACxB,UAAI,aAAa,cAAc,KAAK;AAClC,0BAAkB,WAAW,KAAK,QAAQ;AAC1C,eAAO;AAAA,MACT;AAAA,IACF,WAAW,UAAU,WAAW,KAAK;AACnC,wBAAkB,QAAQ,KAAK,QAAQ;AACvC,aAAO;AAAA,IACT;AAAA,EACF;AAEA,SAAO;AACT;AAKA,SAAS,kBAAkB,QAAQ,QAAQ,UAAU;AACnD,SAAO,iBAAiB,QAAQ;AAAA,IAC9B,KAAK,EAAE,YAAY,OAAO,OAAO,OAAO,IAAI;AAAA,IAC5C,SAAS,EAAE,YAAY,OAAO,OAAO,OAAO,QAAQ;AAAA,IACpD,QAAQ,EAAE,YAAY,OAAO,OAAO,OAAO,OAAO;AAAA,IAClD,MAAM,EAAE,YAAY,OAAO,OAAO,OAAO,KAAK;AAAA,IAC9C,MAAM,EAAE,YAAY,OAAO,OAAO,OAAO,KAAK;AAAA,IAC9C,QAAQ,EAAE,YAAY,OAAO,OAAO,OAAO,OAAO;AAAA,IAClD,WAAW,EAAE,YAAY,OAAO,OAAO,SAAS;AAAA,EAClD,CAAC;AACH;AAEA,SAAS,iBAAiB,MAAM;AAC9B,QAAM,MAAM,CAAC;AACb,aAAW,KAAK,MAAM;AACpB,eAAW,CAAC,GAAG,CAAC,KAAK,OAAO,QAAQ,CAAC,EAAG,KAAI,CAAC,IAAI;AAAA,EACnD;AACA,SAAO;AACT;AAEA,eAAe,eAAe,OAAO,SAAS,EAAE,UAAU,iBAAiB,aAAa,uBAAuB,SAAS,GAAG;AACzH,QAAM,WAAW,iBAAAD,QAAK,QAAQ,KAAK;AACnC,QAAM,MAAM,CAAC;AACb,QAAM,IAAI,UAAU,CAAC;AACrB,SAAO;AAEP,iBAAe,IAAI,MAAM,OAAO;AAC9B,QAAI,QAAQ,iBAAiB;AAC3B,YAAM,IAAI,MAAM,8BAA8B,eAAe,kBAAkB,IAAI,EAAE;AAAA,IACvF;AAEA,UAAM,MAAM,iBAAAA,QAAK,QAAQ,IAAI;AAE7B,QAA
I,QAAQ,IAAI,GAAG,GAAG;AACpB,UAAI,uBAAuB;AACzB,iBAAS,KAAK,EAAE,MAAM,mBAAmB,MAAM,KAAK,SAAS,qCAAqC,GAAG,GAAG,CAAC;AAAA,MAC3G;AACA;AAAA,IACF;AACA,YAAQ,IAAI,GAAG;AAGf,QAAI,gBAAgB,MAAM;AACxB,YAAM,OAAO,MAAM,gBAAAC,QAAG,KAAK,GAAG;AAC9B,UAAI,KAAK,OAAO,aAAa;AAC3B,cAAM,IAAI,MAAM,sBAAsB,KAAK,IAAI,gCAAgC,WAAW,YAAY,GAAG,EAAE;AAAA,MAC7G;AAAA,IACF;AAEA,UAAM,OAAO,MAAM,gBAAAA,QAAG,SAAS,KAAK,MAAM;AAC1C,UAAM,EAAE,SAAS,QAAI,2BAAY,MAAM,EAAE,UAAU,KAAK,qBAAqB,KAAK,CAAC;AAEnF,eAAW,OAAO,UAAU;AAC1B,YAAM,SAAS,iBAAAD,QAAK,QAAQ,iBAAAA,QAAK,QAAQ,GAAG,GAAG,GAAG;AAGlD,UAAI,aAAa,MAAM;AACrB,cAAM,gBAAgB,iBAAAA,QAAK,UAAU,MAAM;AAC3C,YAAI,CAAC,cAAc,WAAW,WAAW,iBAAAA,QAAK,GAAG,KAAK,kBAAkB,UAAU;AAChF,gBAAM,IAAI;AAAA,YACR,4CAA4C,GAAG,kBAAkB,aAAa,gCAAgC,QAAQ;AAAA,UACxH;AAAA,QACF;AAAA,MACF;AAEA,YAAM,UAAU,UAAM,iBAAAE,SAAG,OAAO,QAAQ,OAAO,GAAG,GAAG,EAAE,KAAK,KAAK,CAAC;AAElE,iBAAW,KAAK,QAAQ,KAAK,CAAC,GAAG,MAAM,EAAE,cAAc,CAAC,CAAC,GAAG;AAE1D,YAAI,aAAa,MAAM;AACrB,gBAAM,kBAAkB,iBAAAF,QAAK,UAAU,CAAC;AACxC,cAAI,CAAC,gBAAgB,WAAW,WAAW,iBAAAA,QAAK,GAAG,KAAK,oBAAoB,UAAU;AACpF,kBAAM,IAAI;AAAA,cACR,+CAA+C,CAAC,0BAA0B,QAAQ;AAAA,YACpF;AAAA,UACF;AAAA,QACF;AACA,cAAM,IAAI,GAAG,QAAQ,CAAC;AAAA,MACxB;AAAA,IACF;AAEA,QAAI,KAAK,GAAG;AAAA,EACd;AACF;AAOA,eAAsB,eAAe,OAAO;AAC1C,QAAM,UAAU,oBAAI,IAAI;AACxB,QAAM,QAAQ,CAAC;AACf,QAAM,SAAS,CAAC;AAEhB,QAAM,YAAY,iBAAAA,QAAK,QAAQ,KAAK,CAAC;AAErC,SAAO;AAAA,IACL,OAAO,OAAO,WAAW;AAAA,IACzB;AAAA,IACA;AAAA,EACF;AAEA,iBAAe,YAAY,MAAM;AAC/B,UAAM,MAAM,iBAAAA,QAAK,QAAQ,IAAI;AAC7B,QAAI,QAAQ,IAAI,GAAG,EAAG;AACtB,YAAQ,IAAI,GAAG;AAEf,QAAI;AACF,YAAM,OAAO,MAAM,gBAAAC,QAAG,SAAS,KAAK,MAAM;AAC1C,YAAM,EAAE,SAAS,QAAI,2BAAY,MAAM,EAAE,UAAU,IAAI,CAAC;AACxD,YAAM,KAAK,GAAG;AAEd,iBAAW,OAAO,UAAU;AAC1B,cAAM,SAAS,iBAAAD,QAAK,QAAQ,iBAAAA,QAAK,QAAQ,GAAG,GAAG,GAAG;AAClD,cAAM,UAAU,UAAM,iBAAAE,SAAG,OAAO,QAAQ,OAAO,GAAG,GAAG,EAAE,KAAK,KAAK,CAAC;AAClE,mBAAW,KAAK,QAAQ,KAAK,CAAC,GAAG,MAAM,EAAE,cAAc,CAAC,CAAC,GAAG;AAC1D,gBAAM,YAAY,CAAC;AAAA,QACrB;AAAA,MACF;AAAA,IACF,SAAS,GAAG;AACV,aAAO,KAAK,EAAE,MAAM,KAAK,OAAO,EAAE,QAAQ,CAAC;AAAA,IAC7C;AAAA,EACF;AACF;AAOA,eAAsB,eAAe,OAAO;AAC1C,QAAM,UAAU,oBAAI,IAAI;AACxB,QAAM,QAAQ,CAAC;AACf,QAAM,UAAU,iBAAAF,QAAK,QAAQ,KAAK,CAAC;AACnC,SAAO;AAEP,iBAAe,UAAU,MAAM;AAC7B,UAAM,MAAM,iBAAAA,QAAK,QAAQ,IAAI;AAC7B,QAAI,QAAQ,IAAI,GAAG,EAAG;AACtB,YAAQ,IAAI,GAAG;AAEf,UAAM,OAAO,MAAM,gBAAAC,QAAG,SAAS,KAAK,MAAM;AAC1C,UAAM,EAAE,SAAS,QAAI,2BAAY,MAAM,EAAE,UAAU,KAAK,qBAAqB,KAAK,CAAC;AAEnF,eAAW,OAAO,UAAU;AAC1B,YAAM,SAAS,iBAAAD,QAAK,QAAQ,iBAAAA,QAAK,QAAQ,GAAG,GAAG,GAAG;AAClD,YAAM,UAAU,UAAM,iBAAAE,SAAG,OAAO,QAAQ,OAAO,GAAG,GAAG,EAAE,KAAK,KAAK,CAAC;AAClE,iBAAW,KAAK,QAAQ,KAAK,CAAC,GAAG,MAAM,EAAE,cAAc,CAAC,CAAC,GAAG;AAC1D,cAAM,UAAU,CAAC;AAAA,MACnB;AAAA,IACF;AAEA,UAAM,KAAK,GAAG;AAAA,EAChB;AACF;AASA,eAAsB,WAAW,MAAM;AACrC,QAAM,EAAE,MAAM,MAAM,GAAG,SAAS,IAAI;AAEpC,MAAI,CAAC,QAAQ,CAAC,MAAM;AAClB,UAAM,IAAI,MAAM,6CAA6C;AAAA,EAC/D;AAEA,QAAM,OAAO,MAAM,WAAW,EAAE,GAAG,UAAU,KAAK,KAAK,CAAC;AACxD,QAAM,OAAO,MAAM,WAAW,EAAE,GAAG,UAAU,KAAK,KAAK,CAAC;AAExD,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA,MAAM,KAAK,KAAK,IAAI;AAAA,EACtB;AACF;AASA,eAAsB,WAAW,MAAM;AACrC,QAAM,QAAQ,MAAM;AACpB,MAAI,CAAC,MAAO,OAAM,IAAI,MAAM,oCAAoC;AAEhE,QAAM,SAAS,CAAC;AAChB,QAAM,UAAU,oBAAI,IAAI;AACxB,QAAM,WAAW,CAAC;AAClB,QAAM,WAAW,CAAC;AAClB,MAAI,YAAY;AAGhB,QAAM,aAAa,iBAAAF,QAAK,QAAQ,KAAK,CAAC;AAEtC,iBAAe,aAAa,MAAM;AAChC,UAAM,MAAM,iBAAAA,QAAK,QAAQ,IAAI;AAC7B,QAAI,QAAQ,IAAI,GAAG,EAAG;AACtB,YAAQ,IAAI,GAAG;AAEf,QAAI;AACF,YAAM,OAAO,MAAM,gBAAAC,QAAG,SAAS,KAAK,MAAM;AAC1C,YAAM,aAAS,2BAAY,MAAM,EAAE,UAAU,IAAI,CAAC;AAClD,eAAS,KAAK,GAAG;AAGjB,UAAI,CAAC,aAAa,OAAO,MAAM,QAAQ;AACrC,oBAAY,OAAO;AAAA,MACrB;AAEA,iBAAW,QAAQ,OAAO,OAAO;AAC/
B,iBAAS,KAAK,EAAE,GAAG,MAAM,UAAU,IAAI,CAAC;AAAA,MAC1C;AAEA,iBAAW,OAAO,OAAO,UAAU;AACjC,cAAM,SAAS,iBAAAD,QAAK,QAAQ,iBAAAA,QAAK,QAAQ,GAAG,GAAG,GAAG;AAClD,cAAM,UAAU,UAAM,iBAAAE,SAAG,OAAO,QAAQ,OAAO,GAAG,GAAG,EAAE,KAAK,KAAK,CAAC;AAClE,mBAAW,KAAK,QAAQ,KAAK,CAAC,GAAG,MAAM,EAAE,cAAc,CAAC,CAAC,GAAG;AAC1D,gBAAM,aAAa,CAAC;AAAA,QACtB;AAAA,MACF;AAAA,IACF,SAAS,GAAG;AACV,aAAO,KAAK;AAAA,QACV,MAAM;AAAA,QACN,UAAU;AAAA,QACV,SAAS,EAAE;AAAA,QACX,MAAM;AAAA,MACR,CAAC;AAAA,IACH;AAAA,EACF;AAGA,QAAM,OAAO,KAAK,QAAQ,aAAa,CAAC,KAAK;AAG7C,QAAM,aAAa,oBAAI,IAAI;AAC3B,aAAW,QAAQ,UAAU;AAC3B,UAAM,QAAQ,KAAK,YAAY,MAAM,GAAG,KAAK,MAAM,EAAE,KAAK,GAAG;AAC7D,UAAM,UAAU,KAAK,YAAY,MAAM,KAAK,MAAM,EAAE,KAAK,GAAG;AAC5D,UAAM,MAAM,GAAG,KAAK,IAAI,OAAO;AAE/B,QAAI,CAAC,WAAW,IAAI,GAAG,GAAG;AACxB,iBAAW,IAAI,KAAK,CAAC,CAAC;AAAA,IACxB;AACA,eAAW,IAAI,GAAG,EAAE,KAAK,IAAI;AAAA,EAC/B;AAEA,aAAW,CAAC,KAAK,KAAK,KAAK,WAAW,QAAQ,GAAG;AAC/C,QAAI,MAAM,SAAS,GAAG;AAEpB,YAAM,YAAY,MAAM,IAAI,CAAC,MAAM,GAAG,EAAE,QAAQ,IAAI,EAAE,IAAI,EAAE;AAC5D,YAAM,kBAAkB,IAAI,IAAI,SAAS;AAEzC,UAAI,gBAAgB,OAAO,GAAG;AAC5B,cAAM,CAAC,OAAO,OAAO,IAAI,IAAI,MAAM,GAAG;AACtC,eAAO,KAAK;AAAA,UACV,MAAM;AAAA,UACN,UAAU;AAAA,UACV,SAAS,SAAS,KAAK,IAAI,OAAO,gBAAgB,MAAM,MAAM;AAAA,UAC9D,MAAM,MAAM,MAAM,SAAS,CAAC,EAAE;AAAA,UAC9B,MAAM,MAAM,MAAM,SAAS,CAAC,EAAE;AAAA,QAChC,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AAIA,aAAW,QAAQ,UAAU;AAC3B,UAAM,QAAQ,KAAK,YAAY,MAAM,GAAG,KAAK,MAAM;AACnD,UAAM,UAAU,KAAK,YAAY,MAAM,KAAK,MAAM,EAAE,KAAK,GAAG;AAE5D,QAAI,MAAM,MAAM,CAAC,MAAM,MAAM,GAAG,GAAG;AAEjC,YAAM,eAAe,SAAS,OAAO,CAAC,MAAM;AAC1C,cAAM,WAAW,EAAE,YAAY,MAAM,KAAK,MAAM,EAAE,KAAK,GAAG;AAC1D,YAAI,aAAa,QAAS,QAAO;AACjC,cAAM,SAAS,EAAE,YAAY,MAAM,GAAG,KAAK,MAAM;AACjD,eAAO,OAAO,KAAK,CAAC,MAAM,MAAM,GAAG,KAAK,MAAM;AAAA,MAChD,CAAC;AAED,UAAI,aAAa,SAAS,GAAG;AAC3B,eAAO,KAAK;AAAA,UACV,MAAM;AAAA,UACN,UAAU;AAAA,UACV,SAAS,sBAAsB,OAAO,sBAAsB,aAAa,MAAM;AAAA,UAC/E,MAAM,KAAK;AAAA,UACX,MAAM,KAAK;AAAA,QACb,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AAGA,aAAW,QAAQ,UAAU;AAC3B,QAAI;AACF,YAAM,OAAO,MAAM,gBAAAD,QAAG,SAAS,MAAM,MAAM;AAC3C,YAAM,aAAS,2BAAY,MAAM,EAAE,UAAU,KAAK,CAAC;AAEnD,iBAAW,OAAO,OAAO,UAAU;AACjC,cAAM,SAAS,iBAAAD,QAAK,QAAQ,iBAAAA,QAAK,QAAQ,IAAI,GAAG,GAAG;AACnD,cAAM,UAAU,UAAM,iBAAAE,SAAG,OAAO,QAAQ,OAAO,GAAG,GAAG,EAAE,KAAK,KAAK,CAAC;AAClE,YAAI,QAAQ,WAAW,GAAG;AACxB,iBAAO,KAAK;AAAA,YACV,MAAM;AAAA,YACN,UAAU;AAAA,YACV,SAAS,oBAAoB,GAAG;AAAA,YAChC;AAAA,UACF,CAAC;AAAA,QACH;AAAA,MACF;AAAA,IACF,QAAQ;AAAA,IAAC;AAAA,EACX;AAEA,SAAO,EAAE,OAAO;AAClB;",
+  "names": ["import_dotenv", "path", "fs", "fg"]
 }

package/dist/cjs/resolver.js
CHANGED

@@ -22,7 +22,7 @@ __export(resolver_exports, {
 module.exports = __toCommonJS(resolver_exports);
 var import_strings = require("./strings.js");
 var import_util = require("./util.js");
-function resolveRules({ rules, dims, ctx, vars, allowedEnvVars = null, allowedVars = null, warnings = [] }) {
+function resolveRules({ rules, dims, ctx, vars, env = null, allowedEnvVars = null, allowedVars = null, warnings = [] }) {
   const indexed = [];
   for (const r of rules) {
     const scope = r.keySegments.slice(0, dims.length);
@@ -109,7 +109,7 @@ function resolveRules({ rules, dims, ctx, vars, allowedEnvVars = null, allowedVa
       continue;
     }
   }
-  const finalCfg = (0, import_strings.applyInterpolationDeep)(out, { ctx, vars, allowedEnvVars, allowedVars, warnings });
+  const finalCfg = (0, import_strings.applyInterpolationDeep)(out, { ctx, vars, env, allowedEnvVars, allowedVars, warnings });
   Object.defineProperties(finalCfg, {
     // Core methods
     get: { enumerable: false, value: (p) => getByPath(finalCfg, p) },

package/dist/cjs/resolver.js.map
CHANGED

@@ -1,7 +1,7 @@
 {
   "version": 3,
   "sources": ["../../src/resolver.js"],
-
"sourcesContent": ["import { applyInterpolationDeep } from \"./strings.js\";\nimport { getDeep, setDeep, deepMerge, isPlainObject } from \"./util.js\";\n\nexport function resolveRules({ rules, dims, ctx, vars, allowedEnvVars = null, allowedVars = null, warnings = [] }) {\n const indexed = [];\n for (const r of rules) {\n const scope = r.keySegments.slice(0, dims.length);\n const keyPath = r.keySegments.slice(dims.length);\n\n if (scope.length !== dims.length || keyPath.length === 0) {\n throw new Error(\n `${r.filePath}:${r.line}: Key must have ${dims.length} scope segments + at least one key segment: ${r.keyRaw}`\n );\n }\n\n indexed.push({\n ...r,\n scope,\n keyPath,\n keyPathStr: keyPath.join(\".\"),\n });\n }\n\n const ctxScope = dims.map((d) => String(ctx[d] ?? \"\"));\n\n const candidatesByKey = new Map();\n const postOps = [];\n\n for (const r of indexed) {\n if (!matches(r.scope, ctxScope)) continue;\n\n if (r.op === \"+=\" || r.op === \"-=\") postOps.push(r);\n else {\n if (!candidatesByKey.has(r.keyPathStr)) candidatesByKey.set(r.keyPathStr, []);\n candidatesByKey.get(r.keyPathStr).push(r);\n }\n }\n\n const out = {};\n const trace = new Map();\n\n for (const [key, cand] of candidatesByKey.entries()) {\n let winner = cand[0];\n let best = specificity(winner.scope);\n for (let i = 1; i < cand.length; i++) {\n const s = specificity(cand[i].scope);\n if (s > best) {\n best = s;\n winner = cand[i];\n } else if (s === best) {\n winner = cand[i];\n }\n }\n\n const existing = getDeep(out, winner.keyPath);\n\n if (winner.op === \":=\") {\n if (existing === undefined) {\n setDeep(out, winner.keyPath, clone(winner.value));\n trace.set(key, packTrace(winner, best));\n }\n continue;\n }\n\n if (winner.op === \"|=\") {\n if (existing === undefined) {\n setDeep(out, winner.keyPath, clone(winner.value));\n } else if (isPlainObject(existing) && isPlainObject(winner.value)) {\n setDeep(out, winner.keyPath, deepMerge(existing, winner.value));\n } else {\n setDeep(out, winner.keyPath, clone(winner.value));\n }\n trace.set(key, packTrace(winner, best));\n continue;\n }\n\n setDeep(out, winner.keyPath, clone(winner.value));\n trace.set(key, packTrace(winner, best));\n }\n\n postOps.sort((a, b) => specificity(a.scope) - specificity(b.scope));\n\n for (const r of postOps) {\n const key = r.keyPathStr;\n const best = specificity(r.scope);\n\n const existing = getDeep(out, r.keyPath);\n const val = clone(r.value);\n\n if (r.op === \"+=\") {\n const add = Array.isArray(val) ? val : [val];\n if (existing === undefined) setDeep(out, r.keyPath, add);\n else if (Array.isArray(existing)) setDeep(out, r.keyPath, existing.concat(add));\n else setDeep(out, r.keyPath, [existing].concat(add));\n trace.set(key, packTrace(r, best));\n continue;\n }\n\n if (r.op === \"-=\") {\n const remove = new Set(Array.isArray(val) ? 
val : [val]);\n if (Array.isArray(existing)) {\n setDeep(out, r.keyPath, existing.filter((x) => !remove.has(x)));\n trace.set(key, packTrace(r, best));\n }\n continue;\n }\n }\n\n const finalCfg = applyInterpolationDeep(out, { ctx, vars, allowedEnvVars, allowedVars, warnings });\n\n Object.defineProperties(finalCfg, {\n // Core methods\n get: { enumerable: false, value: (p) => getByPath(finalCfg, p) },\n explain: { enumerable: false, value: (p) => explainByPath(trace, p) },\n\n // Serialization - returns a plain object copy without non-enumerable methods\n toJSON: {\n enumerable: false,\n value: () => {\n const plain = {};\n for (const [k, v] of Object.entries(finalCfg)) {\n plain[k] = clone(v);\n }\n return plain;\n },\n },\n\n // List all keys (dot-notation paths)\n keys: {\n enumerable: false,\n value: () => collectKeys(finalCfg),\n },\n\n // Diff against another config\n diff: {\n enumerable: false,\n value: (other) => diffConfigs(finalCfg, other),\n },\n\n // Internal trace data\n _trace: { enumerable: false, value: trace },\n });\n\n return finalCfg;\n}\n\n/**\n * Collect all leaf keys in dot notation\n * @param {object} obj\n * @param {string} prefix\n * @returns {string[]}\n */\nfunction collectKeys(obj, prefix = \"\") {\n const keys = [];\n\n for (const [k, v] of Object.entries(obj)) {\n const path = prefix ? `${prefix}.${k}` : k;\n\n if (isPlainObject(v)) {\n keys.push(...collectKeys(v, path));\n } else {\n keys.push(path);\n }\n }\n\n return keys.sort();\n}\n\n/**\n * Diff two configs, returning added, removed, and changed keys\n * @param {object} a - First config\n * @param {object} b - Second config\n * @returns {{ added: string[], removed: string[], changed: Array<{key: string, from: any, to: any}> }}\n */\nfunction diffConfigs(a, b) {\n const aKeys = new Set(collectKeys(a));\n const bKeys = new Set(collectKeys(b));\n\n const added = [];\n const removed = [];\n const changed = [];\n\n // Keys in b but not in a\n for (const key of bKeys) {\n if (!aKeys.has(key)) {\n added.push(key);\n }\n }\n\n // Keys in a but not in b\n for (const key of aKeys) {\n if (!bKeys.has(key)) {\n removed.push(key);\n }\n }\n\n // Keys in both - check for changes\n for (const key of aKeys) {\n if (bKeys.has(key)) {\n const aVal = getByPath(a, key);\n const bVal = getByPath(b, key);\n\n if (!deepEqual(aVal, bVal)) {\n changed.push({ key, from: aVal, to: bVal });\n }\n }\n }\n\n return {\n added: added.sort(),\n removed: removed.sort(),\n changed: changed.sort((x, y) => x.key.localeCompare(y.key)),\n };\n}\n\n/**\n * Deep equality check for config values.\n * Note: Does not handle circular references (will stack overflow).\n * Config values should never be circular in practice.\n */\nfunction deepEqual(a, b) {\n if (a === b) return true;\n if (typeof a !== typeof b) return false;\n if (a === null || b === null) return a === b;\n\n if (Array.isArray(a) && Array.isArray(b)) {\n if (a.length !== b.length) return false;\n return a.every((v, i) => deepEqual(v, b[i]));\n }\n\n if (typeof a === \"object\" && typeof b === \"object\") {\n const aKeys = Object.keys(a);\n const bKeys = Object.keys(b);\n if (aKeys.length !== bKeys.length) return false;\n return aKeys.every((k) => deepEqual(a[k], b[k]));\n }\n\n return false;\n}\n\nfunction getByPath(obj, p) {\n const segs = String(p).split(\".\").filter(Boolean);\n return getDeep(obj, segs);\n}\n\nfunction explainByPath(trace, p) {\n const key = String(p);\n return trace.get(key) ?? 
null;\n}\n\nfunction matches(ruleScope, ctxScope) {\n for (let i = 0; i < ruleScope.length; i++) {\n const r = String(ruleScope[i]);\n const c = String(ctxScope[i]);\n if (r === \"*\") continue;\n if (r !== c) return false;\n }\n return true;\n}\n\nfunction specificity(ruleScope) {\n let s = 0;\n for (const seg of ruleScope) if (seg !== \"*\") s++;\n return s;\n}\n\nfunction packTrace(rule, spec) {\n return {\n op: rule.op,\n scope: rule.scope.map(String),\n specificity: spec,\n filePath: rule.filePath,\n line: rule.line,\n col: rule.col ?? 0,\n keyRaw: rule.keyRaw,\n // Source map style location\n source: `${rule.filePath}:${rule.line}:${rule.col ?? 0}`,\n };\n}\n\nfunction clone(v) {\n if (v === null || v === undefined) return v;\n if (Array.isArray(v)) return v.map(clone);\n if (typeof v === \"object\") return structuredClone(v);\n return v;\n}\n"],
|
|
5
|
-
"mappings": ";;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,qBAAuC;AACvC,kBAA2D;AAEpD,SAAS,aAAa,EAAE,OAAO,MAAM,KAAK,MAAM,iBAAiB,MAAM,cAAc,MAAM,WAAW,CAAC,EAAE,GAAG;
|
|
4
|
+
"sourcesContent": ["import { applyInterpolationDeep } from \"./strings.js\";\nimport { getDeep, setDeep, deepMerge, isPlainObject } from \"./util.js\";\n\nexport function resolveRules({ rules, dims, ctx, vars, env = null, allowedEnvVars = null, allowedVars = null, warnings = [] }) {\n const indexed = [];\n for (const r of rules) {\n const scope = r.keySegments.slice(0, dims.length);\n const keyPath = r.keySegments.slice(dims.length);\n\n if (scope.length !== dims.length || keyPath.length === 0) {\n throw new Error(\n `${r.filePath}:${r.line}: Key must have ${dims.length} scope segments + at least one key segment: ${r.keyRaw}`\n );\n }\n\n indexed.push({\n ...r,\n scope,\n keyPath,\n keyPathStr: keyPath.join(\".\"),\n });\n }\n\n const ctxScope = dims.map((d) => String(ctx[d] ?? \"\"));\n\n const candidatesByKey = new Map();\n const postOps = [];\n\n for (const r of indexed) {\n if (!matches(r.scope, ctxScope)) continue;\n\n if (r.op === \"+=\" || r.op === \"-=\") postOps.push(r);\n else {\n if (!candidatesByKey.has(r.keyPathStr)) candidatesByKey.set(r.keyPathStr, []);\n candidatesByKey.get(r.keyPathStr).push(r);\n }\n }\n\n const out = {};\n const trace = new Map();\n\n for (const [key, cand] of candidatesByKey.entries()) {\n let winner = cand[0];\n let best = specificity(winner.scope);\n for (let i = 1; i < cand.length; i++) {\n const s = specificity(cand[i].scope);\n if (s > best) {\n best = s;\n winner = cand[i];\n } else if (s === best) {\n winner = cand[i];\n }\n }\n\n const existing = getDeep(out, winner.keyPath);\n\n if (winner.op === \":=\") {\n if (existing === undefined) {\n setDeep(out, winner.keyPath, clone(winner.value));\n trace.set(key, packTrace(winner, best));\n }\n continue;\n }\n\n if (winner.op === \"|=\") {\n if (existing === undefined) {\n setDeep(out, winner.keyPath, clone(winner.value));\n } else if (isPlainObject(existing) && isPlainObject(winner.value)) {\n setDeep(out, winner.keyPath, deepMerge(existing, winner.value));\n } else {\n setDeep(out, winner.keyPath, clone(winner.value));\n }\n trace.set(key, packTrace(winner, best));\n continue;\n }\n\n setDeep(out, winner.keyPath, clone(winner.value));\n trace.set(key, packTrace(winner, best));\n }\n\n postOps.sort((a, b) => specificity(a.scope) - specificity(b.scope));\n\n for (const r of postOps) {\n const key = r.keyPathStr;\n const best = specificity(r.scope);\n\n const existing = getDeep(out, r.keyPath);\n const val = clone(r.value);\n\n if (r.op === \"+=\") {\n const add = Array.isArray(val) ? val : [val];\n if (existing === undefined) setDeep(out, r.keyPath, add);\n else if (Array.isArray(existing)) setDeep(out, r.keyPath, existing.concat(add));\n else setDeep(out, r.keyPath, [existing].concat(add));\n trace.set(key, packTrace(r, best));\n continue;\n }\n\n if (r.op === \"-=\") {\n const remove = new Set(Array.isArray(val) ? 
val : [val]);\n if (Array.isArray(existing)) {\n setDeep(out, r.keyPath, existing.filter((x) => !remove.has(x)));\n trace.set(key, packTrace(r, best));\n }\n continue;\n }\n }\n\n const finalCfg = applyInterpolationDeep(out, { ctx, vars, env, allowedEnvVars, allowedVars, warnings });\n\n Object.defineProperties(finalCfg, {\n // Core methods\n get: { enumerable: false, value: (p) => getByPath(finalCfg, p) },\n explain: { enumerable: false, value: (p) => explainByPath(trace, p) },\n\n // Serialization - returns a plain object copy without non-enumerable methods\n toJSON: {\n enumerable: false,\n value: () => {\n const plain = {};\n for (const [k, v] of Object.entries(finalCfg)) {\n plain[k] = clone(v);\n }\n return plain;\n },\n },\n\n // List all keys (dot-notation paths)\n keys: {\n enumerable: false,\n value: () => collectKeys(finalCfg),\n },\n\n // Diff against another config\n diff: {\n enumerable: false,\n value: (other) => diffConfigs(finalCfg, other),\n },\n\n // Internal trace data\n _trace: { enumerable: false, value: trace },\n });\n\n return finalCfg;\n}\n\n/**\n * Collect all leaf keys in dot notation\n * @param {object} obj\n * @param {string} prefix\n * @returns {string[]}\n */\nfunction collectKeys(obj, prefix = \"\") {\n const keys = [];\n\n for (const [k, v] of Object.entries(obj)) {\n const path = prefix ? `${prefix}.${k}` : k;\n\n if (isPlainObject(v)) {\n keys.push(...collectKeys(v, path));\n } else {\n keys.push(path);\n }\n }\n\n return keys.sort();\n}\n\n/**\n * Diff two configs, returning added, removed, and changed keys\n * @param {object} a - First config\n * @param {object} b - Second config\n * @returns {{ added: string[], removed: string[], changed: Array<{key: string, from: any, to: any}> }}\n */\nfunction diffConfigs(a, b) {\n const aKeys = new Set(collectKeys(a));\n const bKeys = new Set(collectKeys(b));\n\n const added = [];\n const removed = [];\n const changed = [];\n\n // Keys in b but not in a\n for (const key of bKeys) {\n if (!aKeys.has(key)) {\n added.push(key);\n }\n }\n\n // Keys in a but not in b\n for (const key of aKeys) {\n if (!bKeys.has(key)) {\n removed.push(key);\n }\n }\n\n // Keys in both - check for changes\n for (const key of aKeys) {\n if (bKeys.has(key)) {\n const aVal = getByPath(a, key);\n const bVal = getByPath(b, key);\n\n if (!deepEqual(aVal, bVal)) {\n changed.push({ key, from: aVal, to: bVal });\n }\n }\n }\n\n return {\n added: added.sort(),\n removed: removed.sort(),\n changed: changed.sort((x, y) => x.key.localeCompare(y.key)),\n };\n}\n\n/**\n * Deep equality check for config values.\n * Note: Does not handle circular references (will stack overflow).\n * Config values should never be circular in practice.\n */\nfunction deepEqual(a, b) {\n if (a === b) return true;\n if (typeof a !== typeof b) return false;\n if (a === null || b === null) return a === b;\n\n if (Array.isArray(a) && Array.isArray(b)) {\n if (a.length !== b.length) return false;\n return a.every((v, i) => deepEqual(v, b[i]));\n }\n\n if (typeof a === \"object\" && typeof b === \"object\") {\n const aKeys = Object.keys(a);\n const bKeys = Object.keys(b);\n if (aKeys.length !== bKeys.length) return false;\n return aKeys.every((k) => deepEqual(a[k], b[k]));\n }\n\n return false;\n}\n\nfunction getByPath(obj, p) {\n const segs = String(p).split(\".\").filter(Boolean);\n return getDeep(obj, segs);\n}\n\nfunction explainByPath(trace, p) {\n const key = String(p);\n return trace.get(key) ?? 
null;\n}\n\nfunction matches(ruleScope, ctxScope) {\n for (let i = 0; i < ruleScope.length; i++) {\n const r = String(ruleScope[i]);\n const c = String(ctxScope[i]);\n if (r === \"*\") continue;\n if (r !== c) return false;\n }\n return true;\n}\n\nfunction specificity(ruleScope) {\n let s = 0;\n for (const seg of ruleScope) if (seg !== \"*\") s++;\n return s;\n}\n\nfunction packTrace(rule, spec) {\n return {\n op: rule.op,\n scope: rule.scope.map(String),\n specificity: spec,\n filePath: rule.filePath,\n line: rule.line,\n col: rule.col ?? 0,\n keyRaw: rule.keyRaw,\n // Source map style location\n source: `${rule.filePath}:${rule.line}:${rule.col ?? 0}`,\n };\n}\n\nfunction clone(v) {\n if (v === null || v === undefined) return v;\n if (Array.isArray(v)) return v.map(clone);\n if (typeof v === \"object\") return structuredClone(v);\n return v;\n}\n"],
|
|
5
|
+
"mappings": ";;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,qBAAuC;AACvC,kBAA2D;AAEpD,SAAS,aAAa,EAAE,OAAO,MAAM,KAAK,MAAM,MAAM,MAAM,iBAAiB,MAAM,cAAc,MAAM,WAAW,CAAC,EAAE,GAAG;AAC7H,QAAM,UAAU,CAAC;AACjB,aAAW,KAAK,OAAO;AACrB,UAAM,QAAQ,EAAE,YAAY,MAAM,GAAG,KAAK,MAAM;AAChD,UAAM,UAAU,EAAE,YAAY,MAAM,KAAK,MAAM;AAE/C,QAAI,MAAM,WAAW,KAAK,UAAU,QAAQ,WAAW,GAAG;AACxD,YAAM,IAAI;AAAA,QACR,GAAG,EAAE,QAAQ,IAAI,EAAE,IAAI,mBAAmB,KAAK,MAAM,+CAA+C,EAAE,MAAM;AAAA,MAC9G;AAAA,IACF;AAEA,YAAQ,KAAK;AAAA,MACX,GAAG;AAAA,MACH;AAAA,MACA;AAAA,MACA,YAAY,QAAQ,KAAK,GAAG;AAAA,IAC9B,CAAC;AAAA,EACH;AAEA,QAAM,WAAW,KAAK,IAAI,CAAC,MAAM,OAAO,IAAI,CAAC,KAAK,EAAE,CAAC;AAErD,QAAM,kBAAkB,oBAAI,IAAI;AAChC,QAAM,UAAU,CAAC;AAEjB,aAAW,KAAK,SAAS;AACvB,QAAI,CAAC,QAAQ,EAAE,OAAO,QAAQ,EAAG;AAEjC,QAAI,EAAE,OAAO,QAAQ,EAAE,OAAO,KAAM,SAAQ,KAAK,CAAC;AAAA,SAC7C;AACH,UAAI,CAAC,gBAAgB,IAAI,EAAE,UAAU,EAAG,iBAAgB,IAAI,EAAE,YAAY,CAAC,CAAC;AAC5E,sBAAgB,IAAI,EAAE,UAAU,EAAE,KAAK,CAAC;AAAA,IAC1C;AAAA,EACF;AAEA,QAAM,MAAM,CAAC;AACb,QAAM,QAAQ,oBAAI,IAAI;AAEtB,aAAW,CAAC,KAAK,IAAI,KAAK,gBAAgB,QAAQ,GAAG;AACnD,QAAI,SAAS,KAAK,CAAC;AACnB,QAAI,OAAO,YAAY,OAAO,KAAK;AACnC,aAAS,IAAI,GAAG,IAAI,KAAK,QAAQ,KAAK;AACpC,YAAM,IAAI,YAAY,KAAK,CAAC,EAAE,KAAK;AACnC,UAAI,IAAI,MAAM;AACZ,eAAO;AACP,iBAAS,KAAK,CAAC;AAAA,MACjB,WAAW,MAAM,MAAM;AACrB,iBAAS,KAAK,CAAC;AAAA,MACjB;AAAA,IACF;AAEA,UAAM,eAAW,qBAAQ,KAAK,OAAO,OAAO;AAE5C,QAAI,OAAO,OAAO,MAAM;AACtB,UAAI,aAAa,QAAW;AAC1B,iCAAQ,KAAK,OAAO,SAAS,MAAM,OAAO,KAAK,CAAC;AAChD,cAAM,IAAI,KAAK,UAAU,QAAQ,IAAI,CAAC;AAAA,MACxC;AACA;AAAA,IACF;AAEA,QAAI,OAAO,OAAO,MAAM;AACtB,UAAI,aAAa,QAAW;AAC1B,iCAAQ,KAAK,OAAO,SAAS,MAAM,OAAO,KAAK,CAAC;AAAA,MAClD,eAAW,2BAAc,QAAQ,SAAK,2BAAc,OAAO,KAAK,GAAG;AACjE,iCAAQ,KAAK,OAAO,aAAS,uBAAU,UAAU,OAAO,KAAK,CAAC;AAAA,MAChE,OAAO;AACL,iCAAQ,KAAK,OAAO,SAAS,MAAM,OAAO,KAAK,CAAC;AAAA,MAClD;AACA,YAAM,IAAI,KAAK,UAAU,QAAQ,IAAI,CAAC;AACtC;AAAA,IACF;AAEA,6BAAQ,KAAK,OAAO,SAAS,MAAM,OAAO,KAAK,CAAC;AAChD,UAAM,IAAI,KAAK,UAAU,QAAQ,IAAI,CAAC;AAAA,EACxC;AAEA,UAAQ,KAAK,CAAC,GAAG,MAAM,YAAY,EAAE,KAAK,IAAI,YAAY,EAAE,KAAK,CAAC;AAElE,aAAW,KAAK,SAAS;AACvB,UAAM,MAAM,EAAE;AACd,UAAM,OAAO,YAAY,EAAE,KAAK;AAEhC,UAAM,eAAW,qBAAQ,KAAK,EAAE,OAAO;AACvC,UAAM,MAAM,MAAM,EAAE,KAAK;AAEzB,QAAI,EAAE,OAAO,MAAM;AACjB,YAAM,MAAM,MAAM,QAAQ,GAAG,IAAI,MAAM,CAAC,GAAG;AAC3C,UAAI,aAAa,OAAW,0BAAQ,KAAK,EAAE,SAAS,GAAG;AAAA,eAC9C,MAAM,QAAQ,QAAQ,EAAG,0BAAQ,KAAK,EAAE,SAAS,SAAS,OAAO,GAAG,CAAC;AAAA,UACzE,0BAAQ,KAAK,EAAE,SAAS,CAAC,QAAQ,EAAE,OAAO,GAAG,CAAC;AACnD,YAAM,IAAI,KAAK,UAAU,GAAG,IAAI,CAAC;AACjC;AAAA,IACF;AAEA,QAAI,EAAE,OAAO,MAAM;AACjB,YAAM,SAAS,IAAI,IAAI,MAAM,QAAQ,GAAG,IAAI,MAAM,CAAC,GAAG,CAAC;AACvD,UAAI,MAAM,QAAQ,QAAQ,GAAG;AAC3B,iCAAQ,KAAK,EAAE,SAAS,SAAS,OAAO,CAAC,MAAM,CAAC,OAAO,IAAI,CAAC,CAAC,CAAC;AAC9D,cAAM,IAAI,KAAK,UAAU,GAAG,IAAI,CAAC;AAAA,MACnC;AACA;AAAA,IACF;AAAA,EACF;AAEA,QAAM,eAAW,uCAAuB,KAAK,EAAE,KAAK,MAAM,KAAK,gBAAgB,aAAa,SAAS,CAAC;AAEtG,SAAO,iBAAiB,UAAU;AAAA;AAAA,IAEhC,KAAK,EAAE,YAAY,OAAO,OAAO,CAAC,MAAM,UAAU,UAAU,CAAC,EAAE;AAAA,IAC/D,SAAS,EAAE,YAAY,OAAO,OAAO,CAAC,MAAM,cAAc,OAAO,CAAC,EAAE;AAAA;AAAA,IAGpE,QAAQ;AAAA,MACN,YAAY;AAAA,MACZ,OAAO,MAAM;AACX,cAAM,QAAQ,CAAC;AACf,mBAAW,CAAC,GAAG,CAAC,KAAK,OAAO,QAAQ,QAAQ,GAAG;AAC7C,gBAAM,CAAC,IAAI,MAAM,CAAC;AAAA,QACpB;AACA,eAAO;AAAA,MACT;AAAA,IACF;AAAA;AAAA,IAGA,MAAM;AAAA,MACJ,YAAY;AAAA,MACZ,OAAO,MAAM,YAAY,QAAQ;AAAA,IACnC;AAAA;AAAA,IAGA,MAAM;AAAA,MACJ,YAAY;AAAA,MACZ,OAAO,CAAC,UAAU,YAAY,UAAU,KAAK;AAAA,IAC/C;AAAA;AAAA,IAGA,QAAQ,EAAE,YAAY,OAAO,OAAO,MAAM;AAAA,EAC5C,CAAC;AAED,SAAO;AACT;AAQA,SAAS,YAAY,KAAK,SAAS,IAAI;AACrC,QAAM,OAAO,CAAC;AAEd,aAAW,CAAC,GAAG,CAAC,KAAK,OAAO,QAAQ,GAAG,GAAG;AACxC,UAAM,OAAO,SAAS,GAAG,MAAM,IAAI,CAAC,KAAK;AAEzC,YAAI,2BAAc
,CAAC,GAAG;AACpB,WAAK,KAAK,GAAG,YAAY,GAAG,IAAI,CAAC;AAAA,IACnC,OAAO;AACL,WAAK,KAAK,IAAI;AAAA,IAChB;AAAA,EACF;AAEA,SAAO,KAAK,KAAK;AACnB;AAQA,SAAS,YAAY,GAAG,GAAG;AACzB,QAAM,QAAQ,IAAI,IAAI,YAAY,CAAC,CAAC;AACpC,QAAM,QAAQ,IAAI,IAAI,YAAY,CAAC,CAAC;AAEpC,QAAM,QAAQ,CAAC;AACf,QAAM,UAAU,CAAC;AACjB,QAAM,UAAU,CAAC;AAGjB,aAAW,OAAO,OAAO;AACvB,QAAI,CAAC,MAAM,IAAI,GAAG,GAAG;AACnB,YAAM,KAAK,GAAG;AAAA,IAChB;AAAA,EACF;AAGA,aAAW,OAAO,OAAO;AACvB,QAAI,CAAC,MAAM,IAAI,GAAG,GAAG;AACnB,cAAQ,KAAK,GAAG;AAAA,IAClB;AAAA,EACF;AAGA,aAAW,OAAO,OAAO;AACvB,QAAI,MAAM,IAAI,GAAG,GAAG;AAClB,YAAM,OAAO,UAAU,GAAG,GAAG;AAC7B,YAAM,OAAO,UAAU,GAAG,GAAG;AAE7B,UAAI,CAAC,UAAU,MAAM,IAAI,GAAG;AAC1B,gBAAQ,KAAK,EAAE,KAAK,MAAM,MAAM,IAAI,KAAK,CAAC;AAAA,MAC5C;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AAAA,IACL,OAAO,MAAM,KAAK;AAAA,IAClB,SAAS,QAAQ,KAAK;AAAA,IACtB,SAAS,QAAQ,KAAK,CAAC,GAAG,MAAM,EAAE,IAAI,cAAc,EAAE,GAAG,CAAC;AAAA,EAC5D;AACF;AAOA,SAAS,UAAU,GAAG,GAAG;AACvB,MAAI,MAAM,EAAG,QAAO;AACpB,MAAI,OAAO,MAAM,OAAO,EAAG,QAAO;AAClC,MAAI,MAAM,QAAQ,MAAM,KAAM,QAAO,MAAM;AAE3C,MAAI,MAAM,QAAQ,CAAC,KAAK,MAAM,QAAQ,CAAC,GAAG;AACxC,QAAI,EAAE,WAAW,EAAE,OAAQ,QAAO;AAClC,WAAO,EAAE,MAAM,CAAC,GAAG,MAAM,UAAU,GAAG,EAAE,CAAC,CAAC,CAAC;AAAA,EAC7C;AAEA,MAAI,OAAO,MAAM,YAAY,OAAO,MAAM,UAAU;AAClD,UAAM,QAAQ,OAAO,KAAK,CAAC;AAC3B,UAAM,QAAQ,OAAO,KAAK,CAAC;AAC3B,QAAI,MAAM,WAAW,MAAM,OAAQ,QAAO;AAC1C,WAAO,MAAM,MAAM,CAAC,MAAM,UAAU,EAAE,CAAC,GAAG,EAAE,CAAC,CAAC,CAAC;AAAA,EACjD;AAEA,SAAO;AACT;AAEA,SAAS,UAAU,KAAK,GAAG;AACzB,QAAM,OAAO,OAAO,CAAC,EAAE,MAAM,GAAG,EAAE,OAAO,OAAO;AAChD,aAAO,qBAAQ,KAAK,IAAI;AAC1B;AAEA,SAAS,cAAc,OAAO,GAAG;AAC/B,QAAM,MAAM,OAAO,CAAC;AACpB,SAAO,MAAM,IAAI,GAAG,KAAK;AAC3B;AAEA,SAAS,QAAQ,WAAW,UAAU;AACpC,WAAS,IAAI,GAAG,IAAI,UAAU,QAAQ,KAAK;AACzC,UAAM,IAAI,OAAO,UAAU,CAAC,CAAC;AAC7B,UAAM,IAAI,OAAO,SAAS,CAAC,CAAC;AAC5B,QAAI,MAAM,IAAK;AACf,QAAI,MAAM,EAAG,QAAO;AAAA,EACtB;AACA,SAAO;AACT;AAEA,SAAS,YAAY,WAAW;AAC9B,MAAI,IAAI;AACR,aAAW,OAAO,UAAW,KAAI,QAAQ,IAAK;AAC9C,SAAO;AACT;AAEA,SAAS,UAAU,MAAM,MAAM;AAC7B,SAAO;AAAA,IACL,IAAI,KAAK;AAAA,IACT,OAAO,KAAK,MAAM,IAAI,MAAM;AAAA,IAC5B,aAAa;AAAA,IACb,UAAU,KAAK;AAAA,IACf,MAAM,KAAK;AAAA,IACX,KAAK,KAAK,OAAO;AAAA,IACjB,QAAQ,KAAK;AAAA;AAAA,IAEb,QAAQ,GAAG,KAAK,QAAQ,IAAI,KAAK,IAAI,IAAI,KAAK,OAAO,CAAC;AAAA,EACxD;AACF;AAEA,SAAS,MAAM,GAAG;AAChB,MAAI,MAAM,QAAQ,MAAM,OAAW,QAAO;AAC1C,MAAI,MAAM,QAAQ,CAAC,EAAG,QAAO,EAAE,IAAI,KAAK;AACxC,MAAI,OAAO,MAAM,SAAU,QAAO,gBAAgB,CAAC;AACnD,SAAO;AACT;",
|
|
6
6
|
"names": []
|
|
7
7
|
}
|
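Beyond the `env` plumbing, the embedded resolver source above also shows the non-enumerable helpers attached to every resolved config. A short usage sketch (the entry path and contexts are illustrative):

```
import { loadColony } from "@ant.sh/colony";

const cfg = await loadColony({ entry: "config/app.colony", ctx: { env: "dev" } });
const other = await loadColony({ entry: "config/app.colony", ctx: { env: "prod" } });

cfg.get("database.host");     // dot-path lookup
cfg.explain("database.host"); // { op, scope, specificity, source: "file:line:col", ... }
cfg.keys();                   // sorted dot-notation paths of every leaf value
cfg.diff(other);              // { added, removed, changed: [{ key, from, to }] }
JSON.stringify(cfg);          // toJSON() returns a plain copy without these helpers
```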
package/dist/cjs/strings.js
CHANGED
@@ -23,19 +23,20 @@ __export(strings_exports, {
 module.exports = __toCommonJS(strings_exports);
 var import_util = require("./util.js");
 const RX_SECRET_PROVIDER = /^[A-Z][A-Z0-9_]*:/;
-function applyInterpolationDeep(value, { ctx, vars, allowedEnvVars = null, allowedVars = null, warnings = [] }) {
-  if (typeof value === "string") return interpolate(value, { ctx, vars, allowedEnvVars, allowedVars, warnings });
-  if (Array.isArray(value)) return value.map((v) => applyInterpolationDeep(v, { ctx, vars, allowedEnvVars, allowedVars, warnings }));
+function applyInterpolationDeep(value, { ctx, vars, env = null, allowedEnvVars = null, allowedVars = null, warnings = [] }) {
+  if (typeof value === "string") return interpolate(value, { ctx, vars, env, allowedEnvVars, allowedVars, warnings });
+  if (Array.isArray(value)) return value.map((v) => applyInterpolationDeep(v, { ctx, vars, env, allowedEnvVars, allowedVars, warnings }));
   if ((0, import_util.isPlainObject)(value)) {
     const out = {};
     for (const [k, v] of Object.entries(value)) {
-      out[k] = applyInterpolationDeep(v, { ctx, vars, allowedEnvVars, allowedVars, warnings });
+      out[k] = applyInterpolationDeep(v, { ctx, vars, env, allowedEnvVars, allowedVars, warnings });
     }
     return out;
   }
   return value;
 }
-function interpolate(s, { ctx, vars, allowedEnvVars = null, allowedVars = null, warnings = [] }) {
+function interpolate(s, { ctx, vars, env = null, allowedEnvVars = null, allowedVars = null, warnings = [] }) {
+  const envSource = env ? { ...process.env, ...env } : process.env;
   return s.replace(/\$\{([^}]+)\}/g, (match, exprRaw) => {
     const expr = exprRaw.trim();
     if (expr.startsWith("ENV:")) {
@@ -48,7 +49,7 @@ function interpolate(s, { ctx, vars, allowedEnvVars = null, allowedVars = null,
         });
         return "";
       }
-      return process.env[k] ?? "";
+      return envSource[k] ?? "";
     }
     if (expr.startsWith("VAR:")) {
       const k = expr.slice(4).trim();
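The `envSource` merge on the new line is the precedence rule for the whole feature: dotenv-loaded values shadow the ambient environment. An illustrative check (the variable names and values are invented):

```
process.env.API_URL = "https://prod.example.com";
const env = { API_URL: "http://localhost:3000" }; // e.g. parsed from .env.local

const envSource = env ? { ...process.env, ...env } : process.env;
console.log(envSource.API_URL); // "http://localhost:3000" (the dotenv value wins)
```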
package/dist/cjs/strings.js.map
CHANGED
@@ -1,7 +1,7 @@
 {
   "version": 3,
   "sources": ["../../src/strings.js"],
-  "sourcesContent": ["…embedded src/strings.js (0.1.0): interpolation without env support…"],
-  "mappings": "…generated VLQ mappings for the 0.1.0 build…",
+  "sourcesContent": ["…embedded src/strings.js (0.1.1): env = null option added; interpolate builds envSource = env ? { ...process.env, ...env } : process.env and resolves ${ENV:KEY} against it…"],
+  "mappings": "…regenerated VLQ mappings…",
   "names": []
 }
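For reference, the embedded `strings.js` source defines how each `${...}` form is handled. The summary below uses invented values, and the internal import path is an assumption:

```
import { interpolate } from "@ant.sh/colony/dist/esm/strings.js"; // assumed internal path

const warnings = [];
const out = interpolate("${ENV:HOME} ${VAR:ROOT} ${ctx.env} ${AWS:db/pass} ${bogus}", {
  ctx: { env: "prod" },
  vars: { ROOT: "/srv/app" },
  env: null,                // null means fall back to process.env alone
  allowedEnvVars: ["HOME"], // any other ENV: lookup is blocked with a warning
  warnings,
});
// ${ENV:HOME}    -> envSource.HOME ?? ""
// ${VAR:ROOT}    -> "/srv/app"
// ${ctx.env}     -> "prod"
// ${AWS:db/pass} -> left intact for the secrets layer (matches RX_SECRET_PROVIDER)
// ${bogus}       -> "" plus an unknown_interpolation entry in warnings
```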
package/dist/esm/dotenv.js
ADDED
@@ -0,0 +1,49 @@
+import fs from "node:fs/promises";
+import path from "node:path";
+function parseDotenv(content) {
+  const result = {};
+  const lines = content.split(/\r?\n/);
+  for (let line of lines) {
+    line = line.trim();
+    if (!line || line.startsWith("#")) continue;
+    const match = line.match(/^([^=]+?)\s*=\s*(.*)$/);
+    if (!match) continue;
+    const key = match[1].trim();
+    let value = match[2].trim();
+    if (value.startsWith('"') && value.endsWith('"') || value.startsWith("'") && value.endsWith("'")) {
+      value = value.slice(1, -1);
+      if (value.startsWith('"')) {
+        value = value.replace(/\\n/g, "\n").replace(/\\r/g, "\r").replace(/\\t/g, "\t").replace(/\\"/g, '"').replace(/\\\\/g, "\\");
+      }
+    } else {
+      const commentIdx = value.indexOf(" #");
+      if (commentIdx !== -1) {
+        value = value.slice(0, commentIdx).trim();
+      }
+    }
+    result[key] = value;
+  }
+  return result;
+}
+async function loadDotenv(filePath) {
+  const content = await fs.readFile(path.resolve(filePath), "utf8");
+  return parseDotenv(content);
+}
+async function loadDotenvFiles(filePaths) {
+  const result = {};
+  for (const filePath of filePaths) {
+    try {
+      const vars = await loadDotenv(filePath);
+      Object.assign(result, vars);
+    } catch (err) {
+      if (err.code !== "ENOENT") throw err;
+    }
+  }
+  return result;
+}
+export {
+  loadDotenv,
+  loadDotenvFiles,
+  parseDotenv
+};
+//# sourceMappingURL=dotenv.js.map
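The parser strips surrounding quotes and removes inline comments from unquoted values. Note that the unescape branch tests `value.startsWith('"')` after the quotes have already been stripped, so escape sequences in ordinary double-quoted values pass through verbatim. A quick illustration with invented file content:

```
import { parseDotenv } from "@ant.sh/colony";

const env = parseDotenv([
  "# comments and blank lines are skipped",
  'HOST="db.internal"',         // surrounding quotes are stripped
  "NAME='single quoted'",       // single quotes are taken literally
  "PORT=8080 # inline comment", // stripped for unquoted values only
].join("\n"));

// env => { HOST: "db.internal", NAME: "single quoted", PORT: "8080" }
```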
package/dist/esm/dotenv.js.map
ADDED
@@ -0,0 +1,7 @@
+{
+  "version": 3,
+  "sources": ["../../src/dotenv.js"],
+  "sourcesContent": ["…embedded src/dotenv.js: JSDoc'd parseDotenv, loadDotenv, loadDotenvFiles…"],
+  "mappings": "…generated VLQ mappings…",
+  "names": []
+}
package/dist/esm/index.d.ts
CHANGED
@@ -60,6 +60,13 @@ export interface LoadColonyOptions {
   vars?: Record<string, string>;
   /** Schema validation hook (supports sync and async) */
   schema?: (cfg: ColonyConfig) => ColonyConfig | Promise<ColonyConfig>;
+  /**
+   * Load environment variables from dotenv file(s).
+   * - `true`: Load from [".env", ".env.local"]
+   * - `string`: Load from single file path
+   * - `string[]`: Load from multiple file paths (later files override)
+   */
+  dotenv?: boolean | string | string[];
   /** Security sandbox options */
   sandbox?: SandboxOptions;
   /** Warn when skipping already-visited includes */
@@ -340,3 +347,22 @@ export class OpenBaoProvider implements SecretProvider {
   fetch(key: string): Promise<string>;
   validate(): Promise<void>;
 }
+
+// ============================================================================
+// Dotenv
+// ============================================================================
+
+/**
+ * Parse dotenv file content and return key-value pairs
+ */
+export function parseDotenv(content: string): Record<string, string>;
+
+/**
+ * Load environment variables from a dotenv file
+ */
+export function loadDotenv(filePath: string): Promise<Record<string, string>>;
+
+/**
+ * Load multiple dotenv files (later files override earlier ones)
+ */
+export function loadDotenvFiles(filePaths: string[]): Promise<Record<string, string>>;
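Putting the new option together (the entry and file paths are illustrative):

```
import { loadColony } from "@ant.sh/colony";

// dotenv: true is shorthand for [".env", ".env.local"].
const cfg = await loadColony({
  entry: "config/app.colony",
  dotenv: [".env", ".env.production"], // later files override earlier ones
});
// Values from these files become visible to ${ENV:...} interpolation,
// taking precedence over process.env; missing files are skipped.
```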
package/dist/esm/index.js
CHANGED
@@ -11,9 +11,11 @@ import {
   unregisterSecretProvider,
   clearSecretProviders
 } from "./secrets.js";
+import { loadDotenvFiles, parseDotenv } from "./dotenv.js";
 import { AwsSecretsProvider } from "./providers/aws.js";
 import { VaultProvider } from "./providers/vault.js";
 import { OpenBaoProvider } from "./providers/openbao.js";
+import { parseDotenv as parseDotenv2, loadDotenv, loadDotenvFiles as loadDotenvFiles2 } from "./dotenv.js";
 async function loadColony(opts) {
   const entry = opts?.entry;
   if (!entry) throw new Error("loadColony: opts.entry is required");
@@ -44,11 +46,25 @@ async function loadColony(opts) {
     ...opts.ctx
   };
   const vars = { ROOT: process.cwd(), ...opts.vars ?? {} };
+  let env = null;
+  if (opts.dotenv) {
+    let dotenvPaths;
+    if (opts.dotenv === true) {
+      dotenvPaths = [".env", ".env.local"];
+    } else if (typeof opts.dotenv === "string") {
+      dotenvPaths = [opts.dotenv];
+    } else if (Array.isArray(opts.dotenv)) {
+      dotenvPaths = opts.dotenv;
+    }
+    if (dotenvPaths) {
+      env = await loadDotenvFiles(dotenvPaths);
+    }
+  }
   const requires = parsed.flatMap((p) => p.requires ?? []);
   const allRules = parsed.flatMap((p) => p.rules);
   const allowedEnvVars = sandbox.allowedEnvVars ?? null;
   const allowedVars = sandbox.allowedVars ?? null;
-  let cfg = resolveRules({ rules: allRules, dims, ctx, vars, allowedEnvVars, allowedVars, warnings });
+  let cfg = resolveRules({ rules: allRules, dims, ctx, vars, env, allowedEnvVars, allowedVars, warnings });
   const secretsOpts = opts.secrets ?? {};
   if (secretsOpts.providers?.length || hasGlobalProviders()) {
     const cacheOpts = secretsOpts.cache ?? {};
@@ -340,6 +356,9 @@ export {
   dryRunIncludes,
   lintColony,
   loadColony,
+  loadDotenv,
+  loadDotenvFiles2 as loadDotenvFiles,
+  parseDotenv2 as parseDotenv,
   registerSecretProvider,
   unregisterSecretProvider,
   validateColony
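The dotenv helpers are also re-exported from the package root, so the override order can be exercised standalone (file contents invented for the example):

```
import { loadDotenvFiles } from "@ant.sh/colony";

// .env:        API_URL=https://api.example.com
// .env.local:  API_URL=http://localhost:3000
const env = await loadDotenvFiles([".env", ".env.local"]);
console.log(env.API_URL); // "http://localhost:3000" (later files win)
// Only ENOENT is swallowed; any other read error is rethrown.
```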
package/dist/esm/index.js.map
CHANGED
@@ -1,7 +1,7 @@
 {
   "version": 3,
   "sources": ["../../src/index.js"],
-  "sourcesContent": ["…embedded src/index.js (0.1.0): loadColony without dotenv support…"],
-  "mappings": "…generated VLQ mappings for the 0.1.0 build…",
+  "sourcesContent": ["…embedded src/index.js (0.1.1): dotenv option (true | string | string[]) resolved via loadDotenvFiles, env passed to resolveRules, and parseDotenv/loadDotenv/loadDotenvFiles re-exported…"],
+  "mappings": "…regenerated VLQ mappings…",
   "names": []
 }
Later rule wins.`,\n file: rules[rules.length - 1].filePath,\n line: rules[rules.length - 1].line,\n });\n }\n }\n }\n\n // Check for potentially unused wildcard rules\n // (rules with all wildcards that might be overridden by more specific rules)\n for (const rule of allRules) {\n const scope = rule.keySegments.slice(0, dims.length);\n const keyPath = rule.keySegments.slice(dims.length).join(\".\");\n\n if (scope.every((s) => s === \"*\")) {\n // Check if there are more specific rules for the same key\n const moreSpecific = allRules.filter((r) => {\n const rKeyPath = r.keySegments.slice(dims.length).join(\".\");\n if (rKeyPath !== keyPath) return false;\n const rScope = r.keySegments.slice(0, dims.length);\n return rScope.some((s) => s !== \"*\") && r !== rule;\n });\n\n if (moreSpecific.length > 0) {\n issues.push({\n type: \"overridden_wildcard\",\n severity: \"info\",\n message: `Wildcard rule for \"${keyPath}\" is overridden by ${moreSpecific.length} more specific rule(s)`,\n file: rule.filePath,\n line: rule.line,\n });\n }\n }\n }\n\n // Check for empty includes\n for (const file of allFiles) {\n try {\n const text = await fs.readFile(file, \"utf8\");\n const parsed = parseColony(text, { filePath: file });\n\n for (const inc of parsed.includes) {\n const incAbs = path.resolve(path.dirname(file), inc);\n const matches = await fg(incAbs.replace(/\\\\/g, \"/\"), { dot: true });\n if (matches.length === 0) {\n issues.push({\n type: \"empty_include\",\n severity: \"warning\",\n message: `Include pattern \"${inc}\" matches no files`,\n file,\n });\n }\n }\n } catch {}\n }\n\n return { issues };\n}\n"],
+
"mappings": "AAAA,OAAO,QAAQ;AACf,OAAO,UAAU;AACjB,OAAO,QAAQ;AACf,SAAS,mBAAmB;AAC5B,SAAS,oBAAoB;AAC7B;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,OACK;AACP,SAAS,iBAAiB,mBAAmB;AAM7C,SAAS,0BAA0B;AACnC,SAAS,qBAAqB;AAC9B,SAAS,uBAAuB;AAGhC,SAAS,eAAAA,cAAa,YAAY,mBAAAC,wBAAuB;AAsBzD,eAAsB,WAAW,MAAM;AACrC,QAAM,QAAQ,MAAM;AACpB,MAAI,CAAC,MAAO,OAAM,IAAI,MAAM,oCAAoC;AAEhE,QAAM,UAAU,KAAK,WAAW,CAAC;AACjC,QAAM,WAAW,QAAQ,WAAW,KAAK,QAAQ,QAAQ,QAAQ,IAAI;AACrE,QAAM,kBAAkB,QAAQ,mBAAmB;AACnD,QAAM,cAAc,QAAQ,eAAe;AAC3C,QAAM,wBAAwB,KAAK,yBAAyB;AAE5D,QAAM,UAAU,oBAAI,IAAI;AACxB,QAAM,WAAW,CAAC;AAClB,QAAM,QAAQ,MAAM,eAAe,OAAO,SAAS;AAAA,IACjD;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAED,QAAM,SAAS,CAAC;AAChB,aAAW,QAAQ,OAAO;AACxB,UAAM,OAAO,MAAM,GAAG,SAAS,MAAM,MAAM;AAC3C,WAAO,KAAK,YAAY,MAAM,EAAE,UAAU,KAAK,CAAC,CAAC;AAAA,EACnD;AAEA,QAAM,QACH,MAAM,QAAQ,KAAK,IAAI,KAAK,KAAK,KAAK,SAAS,KAAK,OAAO,SAC5D,OAAO,KAAK,CAAC,MAAM,EAAE,MAAM,MAAM,GAAG,QACpC,CAAC,KAAK;AAGR,QAAM,cAAc,iBAAiB,OAAO,IAAI,CAAC,MAAM,EAAE,eAAe,CAAC,CAAC,CAAC;AAC3E,QAAM,MAAM;AAAA,IACV,GAAG;AAAA,IACH,KAAK,QAAQ,IAAI,YAAY;AAAA,IAC7B,GAAG,KAAK;AAAA,EACV;AAEA,QAAM,OAAO,EAAE,MAAM,QAAQ,IAAI,GAAG,GAAI,KAAK,QAAQ,CAAC,EAAG;AAGzD,MAAI,MAAM;AACV,MAAI,KAAK,QAAQ;AACf,QAAI;AACJ,QAAI,KAAK,WAAW,MAAM;AACxB,oBAAc,CAAC,QAAQ,YAAY;AAAA,IACrC,WAAW,OAAO,KAAK,WAAW,UAAU;AAC1C,oBAAc,CAAC,KAAK,MAAM;AAAA,IAC5B,WAAW,MAAM,QAAQ,KAAK,MAAM,GAAG;AACrC,oBAAc,KAAK;AAAA,IACrB;AACA,QAAI,aAAa;AACf,YAAM,MAAM,gBAAgB,WAAW;AAAA,IACzC;AAAA,EACF;AAGA,QAAM,WAAW,OAAO,QAAQ,CAAC,MAAM,EAAE,YAAY,CAAC,CAAC;AAEvD,QAAM,WAAW,OAAO,QAAQ,CAAC,MAAM,EAAE,KAAK;AAE9C,QAAM,iBAAiB,QAAQ,kBAAkB;AACjD,QAAM,cAAc,QAAQ,eAAe;AAC3C,MAAI,MAAM,aAAa,EAAE,OAAO,UAAU,MAAM,KAAK,MAAM,KAAK,gBAAgB,aAAa,SAAS,CAAC;AAGvG,QAAM,cAAc,KAAK,WAAW,CAAC;AACrC,MAAI,YAAY,WAAW,UAAU,mBAAmB,GAAG;AACzD,UAAM,YAAY,YAAY,SAAS,CAAC;AACxC,UAAM,QAAQ,UAAU,YAAY,QAChC,IAAI,YAAY,UAAU,WAAW,GAAG,IACxC;AAEJ,UAAM,cAAc,MAAM,iBAAiB,KAAK;AAAA,MAC9C,WAAW,YAAY,aAAa,CAAC;AAAA,MACrC,gBAAgB,YAAY,kBAAkB;AAAA,MAC9C;AAAA,MACA,UAAU,UAAU,OAAO;AAAA,MAC3B,YAAY,YAAY,cAAc;AAAA,MACtC;AAAA,IACF,CAAC;AAGD,sBAAkB,aAAa,KAAK,QAAQ;AAC5C,UAAM;AAAA,EACR;AAGA,QAAM,UAAU,CAAC;AACjB,aAAW,UAAU,UAAU;AAC7B,QAAI,IAAI,IAAI,MAAM,MAAM,OAAW,SAAQ,KAAK,MAAM;AAAA,EACxD;AACA,MAAI,QAAQ,QAAQ;AAClB,UAAM,IAAI;AAAA,MACR;AAAA,IACA,QAAQ,IAAI,CAAC,MAAM,OAAO,CAAC,EAAE,EAAE,KAAK,IAAI;AAAA,IAC1C;AAAA,EACF;AAGA,SAAO,eAAe,KAAK,aAAa,EAAE,YAAY,OAAO,OAAO,SAAS,CAAC;AAG9E,MAAI,OAAO,KAAK,WAAW,YAAY;AACrC,UAAM,SAAS,KAAK,OAAO,GAAG;AAG9B,QAAI,UAAU,OAAO,OAAO,SAAS,YAAY;AAC/C,YAAM,YAAY,MAAM;AACxB,UAAI,aAAa,cAAc,KAAK;AAClC,0BAAkB,WAAW,KAAK,QAAQ;AAC1C,eAAO;AAAA,MACT;AAAA,IACF,WAAW,UAAU,WAAW,KAAK;AACnC,wBAAkB,QAAQ,KAAK,QAAQ;AACvC,aAAO;AAAA,IACT;AAAA,EACF;AAEA,SAAO;AACT;AAKA,SAAS,kBAAkB,QAAQ,QAAQ,UAAU;AACnD,SAAO,iBAAiB,QAAQ;AAAA,IAC9B,KAAK,EAAE,YAAY,OAAO,OAAO,OAAO,IAAI;AAAA,IAC5C,SAAS,EAAE,YAAY,OAAO,OAAO,OAAO,QAAQ;AAAA,IACpD,QAAQ,EAAE,YAAY,OAAO,OAAO,OAAO,OAAO;AAAA,IAClD,MAAM,EAAE,YAAY,OAAO,OAAO,OAAO,KAAK;AAAA,IAC9C,MAAM,EAAE,YAAY,OAAO,OAAO,OAAO,KAAK;AAAA,IAC9C,QAAQ,EAAE,YAAY,OAAO,OAAO,OAAO,OAAO;AAAA,IAClD,WAAW,EAAE,YAAY,OAAO,OAAO,SAAS;AAAA,EAClD,CAAC;AACH;AAEA,SAAS,iBAAiB,MAAM;AAC9B,QAAM,MAAM,CAAC;AACb,aAAW,KAAK,MAAM;AACpB,eAAW,CAAC,GAAG,CAAC,KAAK,OAAO,QAAQ,CAAC,EAAG,KAAI,CAAC,IAAI;AAAA,EACnD;AACA,SAAO;AACT;AAEA,eAAe,eAAe,OAAO,SAAS,EAAE,UAAU,iBAAiB,aAAa,uBAAuB,SAAS,GAAG;AACzH,QAAM,WAAW,KAAK,QAAQ,KAAK;AACnC,QAAM,MAAM,CAAC;AACb,QAAM,IAAI,UAAU,CAAC;AACrB,SAAO;AAEP,iBAAe,IAAI,MAAM,OAAO;AAC9B,QAAI,QAAQ,iBAAiB;AAC3B,YAAM,IAAI,MAAM,8BAA8B,eAAe,kBAAkB,IAAI,EAAE;AAAA,IACvF;AAEA,UAAM,MAAM,KAAK,QAAQ,IAAI;AAE7B,QAAI,QAAQ,IAAI,GA
AG,GAAG;AACpB,UAAI,uBAAuB;AACzB,iBAAS,KAAK,EAAE,MAAM,mBAAmB,MAAM,KAAK,SAAS,qCAAqC,GAAG,GAAG,CAAC;AAAA,MAC3G;AACA;AAAA,IACF;AACA,YAAQ,IAAI,GAAG;AAGf,QAAI,gBAAgB,MAAM;AACxB,YAAM,OAAO,MAAM,GAAG,KAAK,GAAG;AAC9B,UAAI,KAAK,OAAO,aAAa;AAC3B,cAAM,IAAI,MAAM,sBAAsB,KAAK,IAAI,gCAAgC,WAAW,YAAY,GAAG,EAAE;AAAA,MAC7G;AAAA,IACF;AAEA,UAAM,OAAO,MAAM,GAAG,SAAS,KAAK,MAAM;AAC1C,UAAM,EAAE,SAAS,IAAI,YAAY,MAAM,EAAE,UAAU,KAAK,qBAAqB,KAAK,CAAC;AAEnF,eAAW,OAAO,UAAU;AAC1B,YAAM,SAAS,KAAK,QAAQ,KAAK,QAAQ,GAAG,GAAG,GAAG;AAGlD,UAAI,aAAa,MAAM;AACrB,cAAM,gBAAgB,KAAK,UAAU,MAAM;AAC3C,YAAI,CAAC,cAAc,WAAW,WAAW,KAAK,GAAG,KAAK,kBAAkB,UAAU;AAChF,gBAAM,IAAI;AAAA,YACR,4CAA4C,GAAG,kBAAkB,aAAa,gCAAgC,QAAQ;AAAA,UACxH;AAAA,QACF;AAAA,MACF;AAEA,YAAM,UAAU,MAAM,GAAG,OAAO,QAAQ,OAAO,GAAG,GAAG,EAAE,KAAK,KAAK,CAAC;AAElE,iBAAW,KAAK,QAAQ,KAAK,CAAC,GAAG,MAAM,EAAE,cAAc,CAAC,CAAC,GAAG;AAE1D,YAAI,aAAa,MAAM;AACrB,gBAAM,kBAAkB,KAAK,UAAU,CAAC;AACxC,cAAI,CAAC,gBAAgB,WAAW,WAAW,KAAK,GAAG,KAAK,oBAAoB,UAAU;AACpF,kBAAM,IAAI;AAAA,cACR,+CAA+C,CAAC,0BAA0B,QAAQ;AAAA,YACpF;AAAA,UACF;AAAA,QACF;AACA,cAAM,IAAI,GAAG,QAAQ,CAAC;AAAA,MACxB;AAAA,IACF;AAEA,QAAI,KAAK,GAAG;AAAA,EACd;AACF;AAOA,eAAsB,eAAe,OAAO;AAC1C,QAAM,UAAU,oBAAI,IAAI;AACxB,QAAM,QAAQ,CAAC;AACf,QAAM,SAAS,CAAC;AAEhB,QAAM,YAAY,KAAK,QAAQ,KAAK,CAAC;AAErC,SAAO;AAAA,IACL,OAAO,OAAO,WAAW;AAAA,IACzB;AAAA,IACA;AAAA,EACF;AAEA,iBAAe,YAAY,MAAM;AAC/B,UAAM,MAAM,KAAK,QAAQ,IAAI;AAC7B,QAAI,QAAQ,IAAI,GAAG,EAAG;AACtB,YAAQ,IAAI,GAAG;AAEf,QAAI;AACF,YAAM,OAAO,MAAM,GAAG,SAAS,KAAK,MAAM;AAC1C,YAAM,EAAE,SAAS,IAAI,YAAY,MAAM,EAAE,UAAU,IAAI,CAAC;AACxD,YAAM,KAAK,GAAG;AAEd,iBAAW,OAAO,UAAU;AAC1B,cAAM,SAAS,KAAK,QAAQ,KAAK,QAAQ,GAAG,GAAG,GAAG;AAClD,cAAM,UAAU,MAAM,GAAG,OAAO,QAAQ,OAAO,GAAG,GAAG,EAAE,KAAK,KAAK,CAAC;AAClE,mBAAW,KAAK,QAAQ,KAAK,CAAC,GAAG,MAAM,EAAE,cAAc,CAAC,CAAC,GAAG;AAC1D,gBAAM,YAAY,CAAC;AAAA,QACrB;AAAA,MACF;AAAA,IACF,SAAS,GAAG;AACV,aAAO,KAAK,EAAE,MAAM,KAAK,OAAO,EAAE,QAAQ,CAAC;AAAA,IAC7C;AAAA,EACF;AACF;AAOA,eAAsB,eAAe,OAAO;AAC1C,QAAM,UAAU,oBAAI,IAAI;AACxB,QAAM,QAAQ,CAAC;AACf,QAAM,UAAU,KAAK,QAAQ,KAAK,CAAC;AACnC,SAAO;AAEP,iBAAe,UAAU,MAAM;AAC7B,UAAM,MAAM,KAAK,QAAQ,IAAI;AAC7B,QAAI,QAAQ,IAAI,GAAG,EAAG;AACtB,YAAQ,IAAI,GAAG;AAEf,UAAM,OAAO,MAAM,GAAG,SAAS,KAAK,MAAM;AAC1C,UAAM,EAAE,SAAS,IAAI,YAAY,MAAM,EAAE,UAAU,KAAK,qBAAqB,KAAK,CAAC;AAEnF,eAAW,OAAO,UAAU;AAC1B,YAAM,SAAS,KAAK,QAAQ,KAAK,QAAQ,GAAG,GAAG,GAAG;AAClD,YAAM,UAAU,MAAM,GAAG,OAAO,QAAQ,OAAO,GAAG,GAAG,EAAE,KAAK,KAAK,CAAC;AAClE,iBAAW,KAAK,QAAQ,KAAK,CAAC,GAAG,MAAM,EAAE,cAAc,CAAC,CAAC,GAAG;AAC1D,cAAM,UAAU,CAAC;AAAA,MACnB;AAAA,IACF;AAEA,UAAM,KAAK,GAAG;AAAA,EAChB;AACF;AASA,eAAsB,WAAW,MAAM;AACrC,QAAM,EAAE,MAAM,MAAM,GAAG,SAAS,IAAI;AAEpC,MAAI,CAAC,QAAQ,CAAC,MAAM;AAClB,UAAM,IAAI,MAAM,6CAA6C;AAAA,EAC/D;AAEA,QAAM,OAAO,MAAM,WAAW,EAAE,GAAG,UAAU,KAAK,KAAK,CAAC;AACxD,QAAM,OAAO,MAAM,WAAW,EAAE,GAAG,UAAU,KAAK,KAAK,CAAC;AAExD,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA,MAAM,KAAK,KAAK,IAAI;AAAA,EACtB;AACF;AASA,eAAsB,WAAW,MAAM;AACrC,QAAM,QAAQ,MAAM;AACpB,MAAI,CAAC,MAAO,OAAM,IAAI,MAAM,oCAAoC;AAEhE,QAAM,SAAS,CAAC;AAChB,QAAM,UAAU,oBAAI,IAAI;AACxB,QAAM,WAAW,CAAC;AAClB,QAAM,WAAW,CAAC;AAClB,MAAI,YAAY;AAGhB,QAAM,aAAa,KAAK,QAAQ,KAAK,CAAC;AAEtC,iBAAe,aAAa,MAAM;AAChC,UAAM,MAAM,KAAK,QAAQ,IAAI;AAC7B,QAAI,QAAQ,IAAI,GAAG,EAAG;AACtB,YAAQ,IAAI,GAAG;AAEf,QAAI;AACF,YAAM,OAAO,MAAM,GAAG,SAAS,KAAK,MAAM;AAC1C,YAAM,SAAS,YAAY,MAAM,EAAE,UAAU,IAAI,CAAC;AAClD,eAAS,KAAK,GAAG;AAGjB,UAAI,CAAC,aAAa,OAAO,MAAM,QAAQ;AACrC,oBAAY,OAAO;AAAA,MACrB;AAEA,iBAAW,QAAQ,OAAO,OAAO;AAC/B,iBAAS,KAAK,EAAE,GAAG,MAAM,UAAU,IAAI,CAAC;AAAA,MAC1C;AAEA,iBAAW,OAAO,OAAO,UAAU;AACjC,cAAM,SAAS,KAAK,QAAQ,KAAK,QAAQ,GAAG,GAAG,GAAG;AAClD,cAAM,UAAU,MAAM,GAAG,OAAO,QAAQ,OAAO,GAAG,GAAG,EAAE
,KAAK,KAAK,CAAC;AAClE,mBAAW,KAAK,QAAQ,KAAK,CAAC,GAAG,MAAM,EAAE,cAAc,CAAC,CAAC,GAAG;AAC1D,gBAAM,aAAa,CAAC;AAAA,QACtB;AAAA,MACF;AAAA,IACF,SAAS,GAAG;AACV,aAAO,KAAK;AAAA,QACV,MAAM;AAAA,QACN,UAAU;AAAA,QACV,SAAS,EAAE;AAAA,QACX,MAAM;AAAA,MACR,CAAC;AAAA,IACH;AAAA,EACF;AAGA,QAAM,OAAO,KAAK,QAAQ,aAAa,CAAC,KAAK;AAG7C,QAAM,aAAa,oBAAI,IAAI;AAC3B,aAAW,QAAQ,UAAU;AAC3B,UAAM,QAAQ,KAAK,YAAY,MAAM,GAAG,KAAK,MAAM,EAAE,KAAK,GAAG;AAC7D,UAAM,UAAU,KAAK,YAAY,MAAM,KAAK,MAAM,EAAE,KAAK,GAAG;AAC5D,UAAM,MAAM,GAAG,KAAK,IAAI,OAAO;AAE/B,QAAI,CAAC,WAAW,IAAI,GAAG,GAAG;AACxB,iBAAW,IAAI,KAAK,CAAC,CAAC;AAAA,IACxB;AACA,eAAW,IAAI,GAAG,EAAE,KAAK,IAAI;AAAA,EAC/B;AAEA,aAAW,CAAC,KAAK,KAAK,KAAK,WAAW,QAAQ,GAAG;AAC/C,QAAI,MAAM,SAAS,GAAG;AAEpB,YAAM,YAAY,MAAM,IAAI,CAAC,MAAM,GAAG,EAAE,QAAQ,IAAI,EAAE,IAAI,EAAE;AAC5D,YAAM,kBAAkB,IAAI,IAAI,SAAS;AAEzC,UAAI,gBAAgB,OAAO,GAAG;AAC5B,cAAM,CAAC,OAAO,OAAO,IAAI,IAAI,MAAM,GAAG;AACtC,eAAO,KAAK;AAAA,UACV,MAAM;AAAA,UACN,UAAU;AAAA,UACV,SAAS,SAAS,KAAK,IAAI,OAAO,gBAAgB,MAAM,MAAM;AAAA,UAC9D,MAAM,MAAM,MAAM,SAAS,CAAC,EAAE;AAAA,UAC9B,MAAM,MAAM,MAAM,SAAS,CAAC,EAAE;AAAA,QAChC,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AAIA,aAAW,QAAQ,UAAU;AAC3B,UAAM,QAAQ,KAAK,YAAY,MAAM,GAAG,KAAK,MAAM;AACnD,UAAM,UAAU,KAAK,YAAY,MAAM,KAAK,MAAM,EAAE,KAAK,GAAG;AAE5D,QAAI,MAAM,MAAM,CAAC,MAAM,MAAM,GAAG,GAAG;AAEjC,YAAM,eAAe,SAAS,OAAO,CAAC,MAAM;AAC1C,cAAM,WAAW,EAAE,YAAY,MAAM,KAAK,MAAM,EAAE,KAAK,GAAG;AAC1D,YAAI,aAAa,QAAS,QAAO;AACjC,cAAM,SAAS,EAAE,YAAY,MAAM,GAAG,KAAK,MAAM;AACjD,eAAO,OAAO,KAAK,CAAC,MAAM,MAAM,GAAG,KAAK,MAAM;AAAA,MAChD,CAAC;AAED,UAAI,aAAa,SAAS,GAAG;AAC3B,eAAO,KAAK;AAAA,UACV,MAAM;AAAA,UACN,UAAU;AAAA,UACV,SAAS,sBAAsB,OAAO,sBAAsB,aAAa,MAAM;AAAA,UAC/E,MAAM,KAAK;AAAA,UACX,MAAM,KAAK;AAAA,QACb,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AAGA,aAAW,QAAQ,UAAU;AAC3B,QAAI;AACF,YAAM,OAAO,MAAM,GAAG,SAAS,MAAM,MAAM;AAC3C,YAAM,SAAS,YAAY,MAAM,EAAE,UAAU,KAAK,CAAC;AAEnD,iBAAW,OAAO,OAAO,UAAU;AACjC,cAAM,SAAS,KAAK,QAAQ,KAAK,QAAQ,IAAI,GAAG,GAAG;AACnD,cAAM,UAAU,MAAM,GAAG,OAAO,QAAQ,OAAO,GAAG,GAAG,EAAE,KAAK,KAAK,CAAC;AAClE,YAAI,QAAQ,WAAW,GAAG;AACxB,iBAAO,KAAK;AAAA,YACV,MAAM;AAAA,YACN,UAAU;AAAA,YACV,SAAS,oBAAoB,GAAG;AAAA,YAChC;AAAA,UACF,CAAC;AAAA,QACH;AAAA,MACF;AAAA,IACF,QAAQ;AAAA,IAAC;AAAA,EACX;AAEA,SAAO,EAAE,OAAO;AAClB;",
+  "names": ["parseDotenv", "loadDotenvFiles"]
 }
package/dist/esm/resolver.js
CHANGED
@@ -1,6 +1,6 @@
 import { applyInterpolationDeep } from "./strings.js";
 import { getDeep, setDeep, deepMerge, isPlainObject } from "./util.js";
-function resolveRules({ rules, dims, ctx, vars, allowedEnvVars = null, allowedVars = null, warnings = [] }) {
+function resolveRules({ rules, dims, ctx, vars, env = null, allowedEnvVars = null, allowedVars = null, warnings = [] }) {
   const indexed = [];
   for (const r of rules) {
     const scope = r.keySegments.slice(0, dims.length);
@@ -87,7 +87,7 @@ function resolveRules({ rules, dims, ctx, vars, allowedEnvVars = null, allowedVa
       continue;
     }
   }
-  const finalCfg = applyInterpolationDeep(out, { ctx, vars, allowedEnvVars, allowedVars, warnings });
+  const finalCfg = applyInterpolationDeep(out, { ctx, vars, env, allowedEnvVars, allowedVars, warnings });
   Object.defineProperties(finalCfg, {
     // Core methods
     get: { enumerable: false, value: (p) => getByPath(finalCfg, p) },
package/dist/esm/resolver.js.map
CHANGED
@@ -1,7 +1,7 @@
 {
   "version": 3,
   "sources": ["../../src/resolver.js"],
-
"sourcesContent": ["import { applyInterpolationDeep } from \"./strings.js\";\nimport { getDeep, setDeep, deepMerge, isPlainObject } from \"./util.js\";\n\nexport function resolveRules({ rules, dims, ctx, vars, allowedEnvVars = null, allowedVars = null, warnings = [] }) {\n const indexed = [];\n for (const r of rules) {\n const scope = r.keySegments.slice(0, dims.length);\n const keyPath = r.keySegments.slice(dims.length);\n\n if (scope.length !== dims.length || keyPath.length === 0) {\n throw new Error(\n `${r.filePath}:${r.line}: Key must have ${dims.length} scope segments + at least one key segment: ${r.keyRaw}`\n );\n }\n\n indexed.push({\n ...r,\n scope,\n keyPath,\n keyPathStr: keyPath.join(\".\"),\n });\n }\n\n const ctxScope = dims.map((d) => String(ctx[d] ?? \"\"));\n\n const candidatesByKey = new Map();\n const postOps = [];\n\n for (const r of indexed) {\n if (!matches(r.scope, ctxScope)) continue;\n\n if (r.op === \"+=\" || r.op === \"-=\") postOps.push(r);\n else {\n if (!candidatesByKey.has(r.keyPathStr)) candidatesByKey.set(r.keyPathStr, []);\n candidatesByKey.get(r.keyPathStr).push(r);\n }\n }\n\n const out = {};\n const trace = new Map();\n\n for (const [key, cand] of candidatesByKey.entries()) {\n let winner = cand[0];\n let best = specificity(winner.scope);\n for (let i = 1; i < cand.length; i++) {\n const s = specificity(cand[i].scope);\n if (s > best) {\n best = s;\n winner = cand[i];\n } else if (s === best) {\n winner = cand[i];\n }\n }\n\n const existing = getDeep(out, winner.keyPath);\n\n if (winner.op === \":=\") {\n if (existing === undefined) {\n setDeep(out, winner.keyPath, clone(winner.value));\n trace.set(key, packTrace(winner, best));\n }\n continue;\n }\n\n if (winner.op === \"|=\") {\n if (existing === undefined) {\n setDeep(out, winner.keyPath, clone(winner.value));\n } else if (isPlainObject(existing) && isPlainObject(winner.value)) {\n setDeep(out, winner.keyPath, deepMerge(existing, winner.value));\n } else {\n setDeep(out, winner.keyPath, clone(winner.value));\n }\n trace.set(key, packTrace(winner, best));\n continue;\n }\n\n setDeep(out, winner.keyPath, clone(winner.value));\n trace.set(key, packTrace(winner, best));\n }\n\n postOps.sort((a, b) => specificity(a.scope) - specificity(b.scope));\n\n for (const r of postOps) {\n const key = r.keyPathStr;\n const best = specificity(r.scope);\n\n const existing = getDeep(out, r.keyPath);\n const val = clone(r.value);\n\n if (r.op === \"+=\") {\n const add = Array.isArray(val) ? val : [val];\n if (existing === undefined) setDeep(out, r.keyPath, add);\n else if (Array.isArray(existing)) setDeep(out, r.keyPath, existing.concat(add));\n else setDeep(out, r.keyPath, [existing].concat(add));\n trace.set(key, packTrace(r, best));\n continue;\n }\n\n if (r.op === \"-=\") {\n const remove = new Set(Array.isArray(val) ? 
val : [val]);\n if (Array.isArray(existing)) {\n setDeep(out, r.keyPath, existing.filter((x) => !remove.has(x)));\n trace.set(key, packTrace(r, best));\n }\n continue;\n }\n }\n\n const finalCfg = applyInterpolationDeep(out, { ctx, vars, allowedEnvVars, allowedVars, warnings });\n\n Object.defineProperties(finalCfg, {\n // Core methods\n get: { enumerable: false, value: (p) => getByPath(finalCfg, p) },\n explain: { enumerable: false, value: (p) => explainByPath(trace, p) },\n\n // Serialization - returns a plain object copy without non-enumerable methods\n toJSON: {\n enumerable: false,\n value: () => {\n const plain = {};\n for (const [k, v] of Object.entries(finalCfg)) {\n plain[k] = clone(v);\n }\n return plain;\n },\n },\n\n // List all keys (dot-notation paths)\n keys: {\n enumerable: false,\n value: () => collectKeys(finalCfg),\n },\n\n // Diff against another config\n diff: {\n enumerable: false,\n value: (other) => diffConfigs(finalCfg, other),\n },\n\n // Internal trace data\n _trace: { enumerable: false, value: trace },\n });\n\n return finalCfg;\n}\n\n/**\n * Collect all leaf keys in dot notation\n * @param {object} obj\n * @param {string} prefix\n * @returns {string[]}\n */\nfunction collectKeys(obj, prefix = \"\") {\n const keys = [];\n\n for (const [k, v] of Object.entries(obj)) {\n const path = prefix ? `${prefix}.${k}` : k;\n\n if (isPlainObject(v)) {\n keys.push(...collectKeys(v, path));\n } else {\n keys.push(path);\n }\n }\n\n return keys.sort();\n}\n\n/**\n * Diff two configs, returning added, removed, and changed keys\n * @param {object} a - First config\n * @param {object} b - Second config\n * @returns {{ added: string[], removed: string[], changed: Array<{key: string, from: any, to: any}> }}\n */\nfunction diffConfigs(a, b) {\n const aKeys = new Set(collectKeys(a));\n const bKeys = new Set(collectKeys(b));\n\n const added = [];\n const removed = [];\n const changed = [];\n\n // Keys in b but not in a\n for (const key of bKeys) {\n if (!aKeys.has(key)) {\n added.push(key);\n }\n }\n\n // Keys in a but not in b\n for (const key of aKeys) {\n if (!bKeys.has(key)) {\n removed.push(key);\n }\n }\n\n // Keys in both - check for changes\n for (const key of aKeys) {\n if (bKeys.has(key)) {\n const aVal = getByPath(a, key);\n const bVal = getByPath(b, key);\n\n if (!deepEqual(aVal, bVal)) {\n changed.push({ key, from: aVal, to: bVal });\n }\n }\n }\n\n return {\n added: added.sort(),\n removed: removed.sort(),\n changed: changed.sort((x, y) => x.key.localeCompare(y.key)),\n };\n}\n\n/**\n * Deep equality check for config values.\n * Note: Does not handle circular references (will stack overflow).\n * Config values should never be circular in practice.\n */\nfunction deepEqual(a, b) {\n if (a === b) return true;\n if (typeof a !== typeof b) return false;\n if (a === null || b === null) return a === b;\n\n if (Array.isArray(a) && Array.isArray(b)) {\n if (a.length !== b.length) return false;\n return a.every((v, i) => deepEqual(v, b[i]));\n }\n\n if (typeof a === \"object\" && typeof b === \"object\") {\n const aKeys = Object.keys(a);\n const bKeys = Object.keys(b);\n if (aKeys.length !== bKeys.length) return false;\n return aKeys.every((k) => deepEqual(a[k], b[k]));\n }\n\n return false;\n}\n\nfunction getByPath(obj, p) {\n const segs = String(p).split(\".\").filter(Boolean);\n return getDeep(obj, segs);\n}\n\nfunction explainByPath(trace, p) {\n const key = String(p);\n return trace.get(key) ?? 
null;\n}\n\nfunction matches(ruleScope, ctxScope) {\n for (let i = 0; i < ruleScope.length; i++) {\n const r = String(ruleScope[i]);\n const c = String(ctxScope[i]);\n if (r === \"*\") continue;\n if (r !== c) return false;\n }\n return true;\n}\n\nfunction specificity(ruleScope) {\n let s = 0;\n for (const seg of ruleScope) if (seg !== \"*\") s++;\n return s;\n}\n\nfunction packTrace(rule, spec) {\n return {\n op: rule.op,\n scope: rule.scope.map(String),\n specificity: spec,\n filePath: rule.filePath,\n line: rule.line,\n col: rule.col ?? 0,\n keyRaw: rule.keyRaw,\n // Source map style location\n source: `${rule.filePath}:${rule.line}:${rule.col ?? 0}`,\n };\n}\n\nfunction clone(v) {\n if (v === null || v === undefined) return v;\n if (Array.isArray(v)) return v.map(clone);\n if (typeof v === \"object\") return structuredClone(v);\n return v;\n}\n"],
-
"mappings": "AAAA,SAAS,8BAA8B;AACvC,SAAS,SAAS,SAAS,WAAW,qBAAqB;AAEpD,SAAS,aAAa,EAAE,OAAO,MAAM,KAAK,MAAM,iBAAiB,MAAM,cAAc,MAAM,WAAW,CAAC,EAAE,GAAG;
+
"sourcesContent": ["import { applyInterpolationDeep } from \"./strings.js\";\nimport { getDeep, setDeep, deepMerge, isPlainObject } from \"./util.js\";\n\nexport function resolveRules({ rules, dims, ctx, vars, env = null, allowedEnvVars = null, allowedVars = null, warnings = [] }) {\n const indexed = [];\n for (const r of rules) {\n const scope = r.keySegments.slice(0, dims.length);\n const keyPath = r.keySegments.slice(dims.length);\n\n if (scope.length !== dims.length || keyPath.length === 0) {\n throw new Error(\n `${r.filePath}:${r.line}: Key must have ${dims.length} scope segments + at least one key segment: ${r.keyRaw}`\n );\n }\n\n indexed.push({\n ...r,\n scope,\n keyPath,\n keyPathStr: keyPath.join(\".\"),\n });\n }\n\n const ctxScope = dims.map((d) => String(ctx[d] ?? \"\"));\n\n const candidatesByKey = new Map();\n const postOps = [];\n\n for (const r of indexed) {\n if (!matches(r.scope, ctxScope)) continue;\n\n if (r.op === \"+=\" || r.op === \"-=\") postOps.push(r);\n else {\n if (!candidatesByKey.has(r.keyPathStr)) candidatesByKey.set(r.keyPathStr, []);\n candidatesByKey.get(r.keyPathStr).push(r);\n }\n }\n\n const out = {};\n const trace = new Map();\n\n for (const [key, cand] of candidatesByKey.entries()) {\n let winner = cand[0];\n let best = specificity(winner.scope);\n for (let i = 1; i < cand.length; i++) {\n const s = specificity(cand[i].scope);\n if (s > best) {\n best = s;\n winner = cand[i];\n } else if (s === best) {\n winner = cand[i];\n }\n }\n\n const existing = getDeep(out, winner.keyPath);\n\n if (winner.op === \":=\") {\n if (existing === undefined) {\n setDeep(out, winner.keyPath, clone(winner.value));\n trace.set(key, packTrace(winner, best));\n }\n continue;\n }\n\n if (winner.op === \"|=\") {\n if (existing === undefined) {\n setDeep(out, winner.keyPath, clone(winner.value));\n } else if (isPlainObject(existing) && isPlainObject(winner.value)) {\n setDeep(out, winner.keyPath, deepMerge(existing, winner.value));\n } else {\n setDeep(out, winner.keyPath, clone(winner.value));\n }\n trace.set(key, packTrace(winner, best));\n continue;\n }\n\n setDeep(out, winner.keyPath, clone(winner.value));\n trace.set(key, packTrace(winner, best));\n }\n\n postOps.sort((a, b) => specificity(a.scope) - specificity(b.scope));\n\n for (const r of postOps) {\n const key = r.keyPathStr;\n const best = specificity(r.scope);\n\n const existing = getDeep(out, r.keyPath);\n const val = clone(r.value);\n\n if (r.op === \"+=\") {\n const add = Array.isArray(val) ? val : [val];\n if (existing === undefined) setDeep(out, r.keyPath, add);\n else if (Array.isArray(existing)) setDeep(out, r.keyPath, existing.concat(add));\n else setDeep(out, r.keyPath, [existing].concat(add));\n trace.set(key, packTrace(r, best));\n continue;\n }\n\n if (r.op === \"-=\") {\n const remove = new Set(Array.isArray(val) ? 
val : [val]);\n if (Array.isArray(existing)) {\n setDeep(out, r.keyPath, existing.filter((x) => !remove.has(x)));\n trace.set(key, packTrace(r, best));\n }\n continue;\n }\n }\n\n const finalCfg = applyInterpolationDeep(out, { ctx, vars, env, allowedEnvVars, allowedVars, warnings });\n\n Object.defineProperties(finalCfg, {\n // Core methods\n get: { enumerable: false, value: (p) => getByPath(finalCfg, p) },\n explain: { enumerable: false, value: (p) => explainByPath(trace, p) },\n\n // Serialization - returns a plain object copy without non-enumerable methods\n toJSON: {\n enumerable: false,\n value: () => {\n const plain = {};\n for (const [k, v] of Object.entries(finalCfg)) {\n plain[k] = clone(v);\n }\n return plain;\n },\n },\n\n // List all keys (dot-notation paths)\n keys: {\n enumerable: false,\n value: () => collectKeys(finalCfg),\n },\n\n // Diff against another config\n diff: {\n enumerable: false,\n value: (other) => diffConfigs(finalCfg, other),\n },\n\n // Internal trace data\n _trace: { enumerable: false, value: trace },\n });\n\n return finalCfg;\n}\n\n/**\n * Collect all leaf keys in dot notation\n * @param {object} obj\n * @param {string} prefix\n * @returns {string[]}\n */\nfunction collectKeys(obj, prefix = \"\") {\n const keys = [];\n\n for (const [k, v] of Object.entries(obj)) {\n const path = prefix ? `${prefix}.${k}` : k;\n\n if (isPlainObject(v)) {\n keys.push(...collectKeys(v, path));\n } else {\n keys.push(path);\n }\n }\n\n return keys.sort();\n}\n\n/**\n * Diff two configs, returning added, removed, and changed keys\n * @param {object} a - First config\n * @param {object} b - Second config\n * @returns {{ added: string[], removed: string[], changed: Array<{key: string, from: any, to: any}> }}\n */\nfunction diffConfigs(a, b) {\n const aKeys = new Set(collectKeys(a));\n const bKeys = new Set(collectKeys(b));\n\n const added = [];\n const removed = [];\n const changed = [];\n\n // Keys in b but not in a\n for (const key of bKeys) {\n if (!aKeys.has(key)) {\n added.push(key);\n }\n }\n\n // Keys in a but not in b\n for (const key of aKeys) {\n if (!bKeys.has(key)) {\n removed.push(key);\n }\n }\n\n // Keys in both - check for changes\n for (const key of aKeys) {\n if (bKeys.has(key)) {\n const aVal = getByPath(a, key);\n const bVal = getByPath(b, key);\n\n if (!deepEqual(aVal, bVal)) {\n changed.push({ key, from: aVal, to: bVal });\n }\n }\n }\n\n return {\n added: added.sort(),\n removed: removed.sort(),\n changed: changed.sort((x, y) => x.key.localeCompare(y.key)),\n };\n}\n\n/**\n * Deep equality check for config values.\n * Note: Does not handle circular references (will stack overflow).\n * Config values should never be circular in practice.\n */\nfunction deepEqual(a, b) {\n if (a === b) return true;\n if (typeof a !== typeof b) return false;\n if (a === null || b === null) return a === b;\n\n if (Array.isArray(a) && Array.isArray(b)) {\n if (a.length !== b.length) return false;\n return a.every((v, i) => deepEqual(v, b[i]));\n }\n\n if (typeof a === \"object\" && typeof b === \"object\") {\n const aKeys = Object.keys(a);\n const bKeys = Object.keys(b);\n if (aKeys.length !== bKeys.length) return false;\n return aKeys.every((k) => deepEqual(a[k], b[k]));\n }\n\n return false;\n}\n\nfunction getByPath(obj, p) {\n const segs = String(p).split(\".\").filter(Boolean);\n return getDeep(obj, segs);\n}\n\nfunction explainByPath(trace, p) {\n const key = String(p);\n return trace.get(key) ?? 
null;\n}\n\nfunction matches(ruleScope, ctxScope) {\n for (let i = 0; i < ruleScope.length; i++) {\n const r = String(ruleScope[i]);\n const c = String(ctxScope[i]);\n if (r === \"*\") continue;\n if (r !== c) return false;\n }\n return true;\n}\n\nfunction specificity(ruleScope) {\n let s = 0;\n for (const seg of ruleScope) if (seg !== \"*\") s++;\n return s;\n}\n\nfunction packTrace(rule, spec) {\n return {\n op: rule.op,\n scope: rule.scope.map(String),\n specificity: spec,\n filePath: rule.filePath,\n line: rule.line,\n col: rule.col ?? 0,\n keyRaw: rule.keyRaw,\n // Source map style location\n source: `${rule.filePath}:${rule.line}:${rule.col ?? 0}`,\n };\n}\n\nfunction clone(v) {\n if (v === null || v === undefined) return v;\n if (Array.isArray(v)) return v.map(clone);\n if (typeof v === \"object\") return structuredClone(v);\n return v;\n}\n"],
+
"mappings": "AAAA,SAAS,8BAA8B;AACvC,SAAS,SAAS,SAAS,WAAW,qBAAqB;AAEpD,SAAS,aAAa,EAAE,OAAO,MAAM,KAAK,MAAM,MAAM,MAAM,iBAAiB,MAAM,cAAc,MAAM,WAAW,CAAC,EAAE,GAAG;AAC7H,QAAM,UAAU,CAAC;AACjB,aAAW,KAAK,OAAO;AACrB,UAAM,QAAQ,EAAE,YAAY,MAAM,GAAG,KAAK,MAAM;AAChD,UAAM,UAAU,EAAE,YAAY,MAAM,KAAK,MAAM;AAE/C,QAAI,MAAM,WAAW,KAAK,UAAU,QAAQ,WAAW,GAAG;AACxD,YAAM,IAAI;AAAA,QACR,GAAG,EAAE,QAAQ,IAAI,EAAE,IAAI,mBAAmB,KAAK,MAAM,+CAA+C,EAAE,MAAM;AAAA,MAC9G;AAAA,IACF;AAEA,YAAQ,KAAK;AAAA,MACX,GAAG;AAAA,MACH;AAAA,MACA;AAAA,MACA,YAAY,QAAQ,KAAK,GAAG;AAAA,IAC9B,CAAC;AAAA,EACH;AAEA,QAAM,WAAW,KAAK,IAAI,CAAC,MAAM,OAAO,IAAI,CAAC,KAAK,EAAE,CAAC;AAErD,QAAM,kBAAkB,oBAAI,IAAI;AAChC,QAAM,UAAU,CAAC;AAEjB,aAAW,KAAK,SAAS;AACvB,QAAI,CAAC,QAAQ,EAAE,OAAO,QAAQ,EAAG;AAEjC,QAAI,EAAE,OAAO,QAAQ,EAAE,OAAO,KAAM,SAAQ,KAAK,CAAC;AAAA,SAC7C;AACH,UAAI,CAAC,gBAAgB,IAAI,EAAE,UAAU,EAAG,iBAAgB,IAAI,EAAE,YAAY,CAAC,CAAC;AAC5E,sBAAgB,IAAI,EAAE,UAAU,EAAE,KAAK,CAAC;AAAA,IAC1C;AAAA,EACF;AAEA,QAAM,MAAM,CAAC;AACb,QAAM,QAAQ,oBAAI,IAAI;AAEtB,aAAW,CAAC,KAAK,IAAI,KAAK,gBAAgB,QAAQ,GAAG;AACnD,QAAI,SAAS,KAAK,CAAC;AACnB,QAAI,OAAO,YAAY,OAAO,KAAK;AACnC,aAAS,IAAI,GAAG,IAAI,KAAK,QAAQ,KAAK;AACpC,YAAM,IAAI,YAAY,KAAK,CAAC,EAAE,KAAK;AACnC,UAAI,IAAI,MAAM;AACZ,eAAO;AACP,iBAAS,KAAK,CAAC;AAAA,MACjB,WAAW,MAAM,MAAM;AACrB,iBAAS,KAAK,CAAC;AAAA,MACjB;AAAA,IACF;AAEA,UAAM,WAAW,QAAQ,KAAK,OAAO,OAAO;AAE5C,QAAI,OAAO,OAAO,MAAM;AACtB,UAAI,aAAa,QAAW;AAC1B,gBAAQ,KAAK,OAAO,SAAS,MAAM,OAAO,KAAK,CAAC;AAChD,cAAM,IAAI,KAAK,UAAU,QAAQ,IAAI,CAAC;AAAA,MACxC;AACA;AAAA,IACF;AAEA,QAAI,OAAO,OAAO,MAAM;AACtB,UAAI,aAAa,QAAW;AAC1B,gBAAQ,KAAK,OAAO,SAAS,MAAM,OAAO,KAAK,CAAC;AAAA,MAClD,WAAW,cAAc,QAAQ,KAAK,cAAc,OAAO,KAAK,GAAG;AACjE,gBAAQ,KAAK,OAAO,SAAS,UAAU,UAAU,OAAO,KAAK,CAAC;AAAA,MAChE,OAAO;AACL,gBAAQ,KAAK,OAAO,SAAS,MAAM,OAAO,KAAK,CAAC;AAAA,MAClD;AACA,YAAM,IAAI,KAAK,UAAU,QAAQ,IAAI,CAAC;AACtC;AAAA,IACF;AAEA,YAAQ,KAAK,OAAO,SAAS,MAAM,OAAO,KAAK,CAAC;AAChD,UAAM,IAAI,KAAK,UAAU,QAAQ,IAAI,CAAC;AAAA,EACxC;AAEA,UAAQ,KAAK,CAAC,GAAG,MAAM,YAAY,EAAE,KAAK,IAAI,YAAY,EAAE,KAAK,CAAC;AAElE,aAAW,KAAK,SAAS;AACvB,UAAM,MAAM,EAAE;AACd,UAAM,OAAO,YAAY,EAAE,KAAK;AAEhC,UAAM,WAAW,QAAQ,KAAK,EAAE,OAAO;AACvC,UAAM,MAAM,MAAM,EAAE,KAAK;AAEzB,QAAI,EAAE,OAAO,MAAM;AACjB,YAAM,MAAM,MAAM,QAAQ,GAAG,IAAI,MAAM,CAAC,GAAG;AAC3C,UAAI,aAAa,OAAW,SAAQ,KAAK,EAAE,SAAS,GAAG;AAAA,eAC9C,MAAM,QAAQ,QAAQ,EAAG,SAAQ,KAAK,EAAE,SAAS,SAAS,OAAO,GAAG,CAAC;AAAA,UACzE,SAAQ,KAAK,EAAE,SAAS,CAAC,QAAQ,EAAE,OAAO,GAAG,CAAC;AACnD,YAAM,IAAI,KAAK,UAAU,GAAG,IAAI,CAAC;AACjC;AAAA,IACF;AAEA,QAAI,EAAE,OAAO,MAAM;AACjB,YAAM,SAAS,IAAI,IAAI,MAAM,QAAQ,GAAG,IAAI,MAAM,CAAC,GAAG,CAAC;AACvD,UAAI,MAAM,QAAQ,QAAQ,GAAG;AAC3B,gBAAQ,KAAK,EAAE,SAAS,SAAS,OAAO,CAAC,MAAM,CAAC,OAAO,IAAI,CAAC,CAAC,CAAC;AAC9D,cAAM,IAAI,KAAK,UAAU,GAAG,IAAI,CAAC;AAAA,MACnC;AACA;AAAA,IACF;AAAA,EACF;AAEA,QAAM,WAAW,uBAAuB,KAAK,EAAE,KAAK,MAAM,KAAK,gBAAgB,aAAa,SAAS,CAAC;AAEtG,SAAO,iBAAiB,UAAU;AAAA;AAAA,IAEhC,KAAK,EAAE,YAAY,OAAO,OAAO,CAAC,MAAM,UAAU,UAAU,CAAC,EAAE;AAAA,IAC/D,SAAS,EAAE,YAAY,OAAO,OAAO,CAAC,MAAM,cAAc,OAAO,CAAC,EAAE;AAAA;AAAA,IAGpE,QAAQ;AAAA,MACN,YAAY;AAAA,MACZ,OAAO,MAAM;AACX,cAAM,QAAQ,CAAC;AACf,mBAAW,CAAC,GAAG,CAAC,KAAK,OAAO,QAAQ,QAAQ,GAAG;AAC7C,gBAAM,CAAC,IAAI,MAAM,CAAC;AAAA,QACpB;AACA,eAAO;AAAA,MACT;AAAA,IACF;AAAA;AAAA,IAGA,MAAM;AAAA,MACJ,YAAY;AAAA,MACZ,OAAO,MAAM,YAAY,QAAQ;AAAA,IACnC;AAAA;AAAA,IAGA,MAAM;AAAA,MACJ,YAAY;AAAA,MACZ,OAAO,CAAC,UAAU,YAAY,UAAU,KAAK;AAAA,IAC/C;AAAA;AAAA,IAGA,QAAQ,EAAE,YAAY,OAAO,OAAO,MAAM;AAAA,EAC5C,CAAC;AAED,SAAO;AACT;AAQA,SAAS,YAAY,KAAK,SAAS,IAAI;AACrC,QAAM,OAAO,CAAC;AAEd,aAAW,CAAC,GAAG,CAAC,KAAK,OAAO,QAAQ,GAAG,GAAG;AACxC,UAAM,OAAO,SAAS,GAAG,MAAM,IAAI,CAAC,KAAK;AAEzC,QAAI,cAAc,CAAC,GAAG;AACpB,WAAK,KAAK,
GAAG,YAAY,GAAG,IAAI,CAAC;AAAA,IACnC,OAAO;AACL,WAAK,KAAK,IAAI;AAAA,IAChB;AAAA,EACF;AAEA,SAAO,KAAK,KAAK;AACnB;AAQA,SAAS,YAAY,GAAG,GAAG;AACzB,QAAM,QAAQ,IAAI,IAAI,YAAY,CAAC,CAAC;AACpC,QAAM,QAAQ,IAAI,IAAI,YAAY,CAAC,CAAC;AAEpC,QAAM,QAAQ,CAAC;AACf,QAAM,UAAU,CAAC;AACjB,QAAM,UAAU,CAAC;AAGjB,aAAW,OAAO,OAAO;AACvB,QAAI,CAAC,MAAM,IAAI,GAAG,GAAG;AACnB,YAAM,KAAK,GAAG;AAAA,IAChB;AAAA,EACF;AAGA,aAAW,OAAO,OAAO;AACvB,QAAI,CAAC,MAAM,IAAI,GAAG,GAAG;AACnB,cAAQ,KAAK,GAAG;AAAA,IAClB;AAAA,EACF;AAGA,aAAW,OAAO,OAAO;AACvB,QAAI,MAAM,IAAI,GAAG,GAAG;AAClB,YAAM,OAAO,UAAU,GAAG,GAAG;AAC7B,YAAM,OAAO,UAAU,GAAG,GAAG;AAE7B,UAAI,CAAC,UAAU,MAAM,IAAI,GAAG;AAC1B,gBAAQ,KAAK,EAAE,KAAK,MAAM,MAAM,IAAI,KAAK,CAAC;AAAA,MAC5C;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AAAA,IACL,OAAO,MAAM,KAAK;AAAA,IAClB,SAAS,QAAQ,KAAK;AAAA,IACtB,SAAS,QAAQ,KAAK,CAAC,GAAG,MAAM,EAAE,IAAI,cAAc,EAAE,GAAG,CAAC;AAAA,EAC5D;AACF;AAOA,SAAS,UAAU,GAAG,GAAG;AACvB,MAAI,MAAM,EAAG,QAAO;AACpB,MAAI,OAAO,MAAM,OAAO,EAAG,QAAO;AAClC,MAAI,MAAM,QAAQ,MAAM,KAAM,QAAO,MAAM;AAE3C,MAAI,MAAM,QAAQ,CAAC,KAAK,MAAM,QAAQ,CAAC,GAAG;AACxC,QAAI,EAAE,WAAW,EAAE,OAAQ,QAAO;AAClC,WAAO,EAAE,MAAM,CAAC,GAAG,MAAM,UAAU,GAAG,EAAE,CAAC,CAAC,CAAC;AAAA,EAC7C;AAEA,MAAI,OAAO,MAAM,YAAY,OAAO,MAAM,UAAU;AAClD,UAAM,QAAQ,OAAO,KAAK,CAAC;AAC3B,UAAM,QAAQ,OAAO,KAAK,CAAC;AAC3B,QAAI,MAAM,WAAW,MAAM,OAAQ,QAAO;AAC1C,WAAO,MAAM,MAAM,CAAC,MAAM,UAAU,EAAE,CAAC,GAAG,EAAE,CAAC,CAAC,CAAC;AAAA,EACjD;AAEA,SAAO;AACT;AAEA,SAAS,UAAU,KAAK,GAAG;AACzB,QAAM,OAAO,OAAO,CAAC,EAAE,MAAM,GAAG,EAAE,OAAO,OAAO;AAChD,SAAO,QAAQ,KAAK,IAAI;AAC1B;AAEA,SAAS,cAAc,OAAO,GAAG;AAC/B,QAAM,MAAM,OAAO,CAAC;AACpB,SAAO,MAAM,IAAI,GAAG,KAAK;AAC3B;AAEA,SAAS,QAAQ,WAAW,UAAU;AACpC,WAAS,IAAI,GAAG,IAAI,UAAU,QAAQ,KAAK;AACzC,UAAM,IAAI,OAAO,UAAU,CAAC,CAAC;AAC7B,UAAM,IAAI,OAAO,SAAS,CAAC,CAAC;AAC5B,QAAI,MAAM,IAAK;AACf,QAAI,MAAM,EAAG,QAAO;AAAA,EACtB;AACA,SAAO;AACT;AAEA,SAAS,YAAY,WAAW;AAC9B,MAAI,IAAI;AACR,aAAW,OAAO,UAAW,KAAI,QAAQ,IAAK;AAC9C,SAAO;AACT;AAEA,SAAS,UAAU,MAAM,MAAM;AAC7B,SAAO;AAAA,IACL,IAAI,KAAK;AAAA,IACT,OAAO,KAAK,MAAM,IAAI,MAAM;AAAA,IAC5B,aAAa;AAAA,IACb,UAAU,KAAK;AAAA,IACf,MAAM,KAAK;AAAA,IACX,KAAK,KAAK,OAAO;AAAA,IACjB,QAAQ,KAAK;AAAA;AAAA,IAEb,QAAQ,GAAG,KAAK,QAAQ,IAAI,KAAK,IAAI,IAAI,KAAK,OAAO,CAAC;AAAA,EACxD;AACF;AAEA,SAAS,MAAM,GAAG;AAChB,MAAI,MAAM,QAAQ,MAAM,OAAW,QAAO;AAC1C,MAAI,MAAM,QAAQ,CAAC,EAAG,QAAO,EAAE,IAAI,KAAK;AACxC,MAAI,OAAO,MAAM,SAAU,QAAO,gBAAgB,CAAC;AACnD,SAAO;AACT;",
   "names": []
 }
package/dist/esm/strings.js
CHANGED
@@ -1,18 +1,19 @@
 import { isPlainObject } from "./util.js";
 const RX_SECRET_PROVIDER = /^[A-Z][A-Z0-9_]*:/;
-function applyInterpolationDeep(value, { ctx, vars, allowedEnvVars = null, allowedVars = null, warnings = [] }) {
-  if (typeof value === "string") return interpolate(value, { ctx, vars, allowedEnvVars, allowedVars, warnings });
-  if (Array.isArray(value)) return value.map((v) => applyInterpolationDeep(v, { ctx, vars, allowedEnvVars, allowedVars, warnings }));
+function applyInterpolationDeep(value, { ctx, vars, env = null, allowedEnvVars = null, allowedVars = null, warnings = [] }) {
+  if (typeof value === "string") return interpolate(value, { ctx, vars, env, allowedEnvVars, allowedVars, warnings });
+  if (Array.isArray(value)) return value.map((v) => applyInterpolationDeep(v, { ctx, vars, env, allowedEnvVars, allowedVars, warnings }));
   if (isPlainObject(value)) {
     const out = {};
     for (const [k, v] of Object.entries(value)) {
-      out[k] = applyInterpolationDeep(v, { ctx, vars, allowedEnvVars, allowedVars, warnings });
+      out[k] = applyInterpolationDeep(v, { ctx, vars, env, allowedEnvVars, allowedVars, warnings });
     }
     return out;
   }
   return value;
 }
-function interpolate(s, { ctx, vars, allowedEnvVars = null, allowedVars = null, warnings = [] }) {
+function interpolate(s, { ctx, vars, env = null, allowedEnvVars = null, allowedVars = null, warnings = [] }) {
+  const envSource = env ? { ...process.env, ...env } : process.env;
   return s.replace(/\$\{([^}]+)\}/g, (match, exprRaw) => {
     const expr = exprRaw.trim();
     if (expr.startsWith("ENV:")) {
@@ -25,7 +26,7 @@ function interpolate(s, { ctx, vars, allowedEnvVars = null, allowedVars = null,
         });
         return "";
       }
-      return
+      return envSource[k] ?? "";
     }
     if (expr.startsWith("VAR:")) {
       const k = expr.slice(4).trim();
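This one-line change is the substance of the release: in 0.1.0 the `ENV:` branch of `interpolate` ended in a bare `return`, and `String.prototype.replace` stringifies an `undefined` callback result, so every `${ENV:KEY}` placeholder rendered as the literal text `undefined`. A minimal standalone sketch of the before/after semantics (plain Node; not the package's code):

```js
// Replacer semantics behind the one-line fix.
const envSource = { ...process.env, HOST: "db.internal" };

// 0.1.0: the callback falls through with a bare `return`, and
// replace() coerces the undefined result to the string "undefined".
const broken = "host=${ENV:HOST}".replace(/\$\{ENV:([^}]+)\}/g, () => { return; });
console.log(broken); // host=undefined

// 0.1.1: look the key up in envSource, falling back to "".
const fixed = "host=${ENV:HOST}".replace(
  /\$\{ENV:([^}]+)\}/g,
  (_match, key) => envSource[key.trim()] ?? ""
);
console.log(fixed); // host=db.internal
```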
package/dist/esm/strings.js.map
CHANGED
@@ -1,7 +1,7 @@
 {
   "version": 3,
   "sources": ["../../src/strings.js"],
-
"sourcesContent": ["import { isPlainObject } from \"./util.js\";\n\n// Regex to detect secret provider patterns: ${PROVIDER:key}\n// Provider must start with uppercase letter, followed by uppercase letters, digits, or underscores\nconst RX_SECRET_PROVIDER = /^[A-Z][A-Z0-9_]*:/;\n\nexport function applyInterpolationDeep(value, { ctx, vars, allowedEnvVars = null, allowedVars = null, warnings = [] }) {\n if (typeof value === \"string\") return interpolate(value, { ctx, vars, allowedEnvVars, allowedVars, warnings });\n if (Array.isArray(value)) return value.map((v) => applyInterpolationDeep(v, { ctx, vars, allowedEnvVars, allowedVars, warnings }));\n if (isPlainObject(value)) {\n const out = {};\n for (const [k, v] of Object.entries(value)) {\n out[k] = applyInterpolationDeep(v, { ctx, vars, allowedEnvVars, allowedVars, warnings });\n }\n return out;\n }\n return value;\n}\n\nexport function interpolate(s, { ctx, vars, allowedEnvVars = null, allowedVars = null, warnings = [] }) {\n return s.replace(/\\$\\{([^}]+)\\}/g, (match, exprRaw) => {\n const expr = exprRaw.trim();\n\n if (expr.startsWith(\"ENV:\")) {\n const k = expr.slice(4).trim();\n // Security: check if env var is allowed\n if (allowedEnvVars !== null && !allowedEnvVars.includes(k)) {\n warnings.push({\n type: \"blocked_env_var\",\n var: k,\n message: `Access to environment variable \"${k}\" blocked by allowedEnvVars whitelist`,\n });\n return \"\";\n }\n return
-
"mappings": "AAAA,SAAS,qBAAqB;AAI9B,MAAM,qBAAqB;AAEpB,SAAS,uBAAuB,OAAO,EAAE,KAAK,MAAM,iBAAiB,MAAM,cAAc,MAAM,WAAW,CAAC,EAAE,GAAG;
+
"sourcesContent": ["import { isPlainObject } from \"./util.js\";\n\n// Regex to detect secret provider patterns: ${PROVIDER:key}\n// Provider must start with uppercase letter, followed by uppercase letters, digits, or underscores\nconst RX_SECRET_PROVIDER = /^[A-Z][A-Z0-9_]*:/;\n\nexport function applyInterpolationDeep(value, { ctx, vars, env = null, allowedEnvVars = null, allowedVars = null, warnings = [] }) {\n if (typeof value === \"string\") return interpolate(value, { ctx, vars, env, allowedEnvVars, allowedVars, warnings });\n if (Array.isArray(value)) return value.map((v) => applyInterpolationDeep(v, { ctx, vars, env, allowedEnvVars, allowedVars, warnings }));\n if (isPlainObject(value)) {\n const out = {};\n for (const [k, v] of Object.entries(value)) {\n out[k] = applyInterpolationDeep(v, { ctx, vars, env, allowedEnvVars, allowedVars, warnings });\n }\n return out;\n }\n return value;\n}\n\nexport function interpolate(s, { ctx, vars, env = null, allowedEnvVars = null, allowedVars = null, warnings = [] }) {\n // Merge env with process.env (env takes precedence)\n const envSource = env ? { ...process.env, ...env } : process.env;\n\n return s.replace(/\\$\\{([^}]+)\\}/g, (match, exprRaw) => {\n const expr = exprRaw.trim();\n\n if (expr.startsWith(\"ENV:\")) {\n const k = expr.slice(4).trim();\n // Security: check if env var is allowed\n if (allowedEnvVars !== null && !allowedEnvVars.includes(k)) {\n warnings.push({\n type: \"blocked_env_var\",\n var: k,\n message: `Access to environment variable \"${k}\" blocked by allowedEnvVars whitelist`,\n });\n return \"\";\n }\n return envSource[k] ?? \"\";\n }\n\n if (expr.startsWith(\"VAR:\")) {\n const k = expr.slice(4).trim();\n // Security: check if custom var is allowed\n if (allowedVars !== null && !allowedVars.includes(k)) {\n warnings.push({\n type: \"blocked_var\",\n var: k,\n message: `Access to custom variable \"${k}\" blocked by allowedVars whitelist`,\n });\n return \"\";\n }\n if (vars?.[k] === undefined) {\n warnings.push({\n type: \"unknown_var\",\n var: k,\n message: `Unknown VAR \"${k}\" in interpolation ${match}`,\n });\n }\n return String(vars?.[k] ?? \"\");\n }\n\n if (expr.startsWith(\"ctx.\")) {\n const k = expr.slice(4).trim();\n if (ctx?.[k] === undefined) {\n warnings.push({\n type: \"unknown_ctx\",\n var: k,\n message: `Unknown ctx dimension \"${k}\" in interpolation ${match}`,\n });\n }\n return String(ctx?.[k] ?? \"\");\n }\n\n // Secret provider patterns (e.g., ${AWS:...}, ${OPENBAO:...}) - leave for secrets.js\n if (RX_SECRET_PROVIDER.test(expr)) {\n return match; // Keep the pattern intact for later secret processing\n }\n\n // Unknown interpolation pattern\n warnings.push({\n type: \"unknown_interpolation\",\n pattern: match,\n message: `Unknown interpolation pattern: ${match}`,\n });\n return \"\";\n });\n}\n"],
+
"mappings": "AAAA,SAAS,qBAAqB;AAI9B,MAAM,qBAAqB;AAEpB,SAAS,uBAAuB,OAAO,EAAE,KAAK,MAAM,MAAM,MAAM,iBAAiB,MAAM,cAAc,MAAM,WAAW,CAAC,EAAE,GAAG;AACjI,MAAI,OAAO,UAAU,SAAU,QAAO,YAAY,OAAO,EAAE,KAAK,MAAM,KAAK,gBAAgB,aAAa,SAAS,CAAC;AAClH,MAAI,MAAM,QAAQ,KAAK,EAAG,QAAO,MAAM,IAAI,CAAC,MAAM,uBAAuB,GAAG,EAAE,KAAK,MAAM,KAAK,gBAAgB,aAAa,SAAS,CAAC,CAAC;AACtI,MAAI,cAAc,KAAK,GAAG;AACxB,UAAM,MAAM,CAAC;AACb,eAAW,CAAC,GAAG,CAAC,KAAK,OAAO,QAAQ,KAAK,GAAG;AAC1C,UAAI,CAAC,IAAI,uBAAuB,GAAG,EAAE,KAAK,MAAM,KAAK,gBAAgB,aAAa,SAAS,CAAC;AAAA,IAC9F;AACA,WAAO;AAAA,EACT;AACA,SAAO;AACT;AAEO,SAAS,YAAY,GAAG,EAAE,KAAK,MAAM,MAAM,MAAM,iBAAiB,MAAM,cAAc,MAAM,WAAW,CAAC,EAAE,GAAG;AAElH,QAAM,YAAY,MAAM,EAAE,GAAG,QAAQ,KAAK,GAAG,IAAI,IAAI,QAAQ;AAE7D,SAAO,EAAE,QAAQ,kBAAkB,CAAC,OAAO,YAAY;AACrD,UAAM,OAAO,QAAQ,KAAK;AAE1B,QAAI,KAAK,WAAW,MAAM,GAAG;AAC3B,YAAM,IAAI,KAAK,MAAM,CAAC,EAAE,KAAK;AAE7B,UAAI,mBAAmB,QAAQ,CAAC,eAAe,SAAS,CAAC,GAAG;AAC1D,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,KAAK;AAAA,UACL,SAAS,mCAAmC,CAAC;AAAA,QAC/C,CAAC;AACD,eAAO;AAAA,MACT;AACA,aAAO,UAAU,CAAC,KAAK;AAAA,IACzB;AAEA,QAAI,KAAK,WAAW,MAAM,GAAG;AAC3B,YAAM,IAAI,KAAK,MAAM,CAAC,EAAE,KAAK;AAE7B,UAAI,gBAAgB,QAAQ,CAAC,YAAY,SAAS,CAAC,GAAG;AACpD,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,KAAK;AAAA,UACL,SAAS,8BAA8B,CAAC;AAAA,QAC1C,CAAC;AACD,eAAO;AAAA,MACT;AACA,UAAI,OAAO,CAAC,MAAM,QAAW;AAC3B,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,KAAK;AAAA,UACL,SAAS,gBAAgB,CAAC,sBAAsB,KAAK;AAAA,QACvD,CAAC;AAAA,MACH;AACA,aAAO,OAAO,OAAO,CAAC,KAAK,EAAE;AAAA,IAC/B;AAEA,QAAI,KAAK,WAAW,MAAM,GAAG;AAC3B,YAAM,IAAI,KAAK,MAAM,CAAC,EAAE,KAAK;AAC7B,UAAI,MAAM,CAAC,MAAM,QAAW;AAC1B,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,KAAK;AAAA,UACL,SAAS,0BAA0B,CAAC,sBAAsB,KAAK;AAAA,QACjE,CAAC;AAAA,MACH;AACA,aAAO,OAAO,MAAM,CAAC,KAAK,EAAE;AAAA,IAC9B;AAGA,QAAI,mBAAmB,KAAK,IAAI,GAAG;AACjC,aAAO;AAAA,IACT;AAGA,aAAS,KAAK;AAAA,MACZ,MAAM;AAAA,MACN,SAAS;AAAA,MACT,SAAS,kCAAkC,KAAK;AAAA,IAClD,CAAC;AACD,WAAO;AAAA,EACT,CAAC;AACH;",
   "names": []
 }
package/package.json
CHANGED
package/src/dotenv.js
ADDED
@@ -0,0 +1,81 @@
+import fs from "node:fs/promises";
+import path from "node:path";
+
+/**
+ * Parse a dotenv file and return key-value pairs
+ * @param {string} content - The content of the .env file
+ * @returns {Record<string, string>}
+ */
+export function parseDotenv(content) {
+  const result = {};
+  const lines = content.split(/\r?\n/);
+
+  for (let line of lines) {
+    // Remove comments (lines starting with # or lines with # after value)
+    line = line.trim();
+    if (!line || line.startsWith("#")) continue;
+
+    // Match KEY=VALUE or KEY="VALUE" or KEY='VALUE'
+    const match = line.match(/^([^=]+?)\s*=\s*(.*)$/);
+    if (!match) continue;
+
+    const key = match[1].trim();
+    let value = match[2].trim();
+
+    // Handle quoted values
+    if ((value.startsWith('"') && value.endsWith('"')) ||
+        (value.startsWith("'") && value.endsWith("'"))) {
+      value = value.slice(1, -1);
+      // Unescape common escape sequences for double-quoted strings
+      if (value.startsWith('"')) {
+        value = value
+          .replace(/\\n/g, "\n")
+          .replace(/\\r/g, "\r")
+          .replace(/\\t/g, "\t")
+          .replace(/\\"/g, '"')
+          .replace(/\\\\/g, "\\");
+      }
+    } else {
+      // Remove inline comments for unquoted values
+      const commentIdx = value.indexOf(" #");
+      if (commentIdx !== -1) {
+        value = value.slice(0, commentIdx).trim();
+      }
+    }
+
+    result[key] = value;
+  }
+
+  return result;
+}
+
+/**
+ * Load a dotenv file from disk
+ * @param {string} filePath - Path to the .env file
+ * @returns {Promise<Record<string, string>>}
+ */
+export async function loadDotenv(filePath) {
+  const content = await fs.readFile(path.resolve(filePath), "utf8");
+  return parseDotenv(content);
+}
+
+/**
+ * Load multiple dotenv files, with later files overriding earlier ones
+ * @param {string[]} filePaths - Paths to .env files
+ * @returns {Promise<Record<string, string>>}
+ */
+export async function loadDotenvFiles(filePaths) {
+  const result = {};
+
+  for (const filePath of filePaths) {
+    try {
+      const vars = await loadDotenv(filePath);
+      Object.assign(result, vars);
+    } catch (err) {
+      // File doesn't exist - silently skip
+      if (err.code !== "ENOENT") throw err;
+    }
+  }
+
+  return result;
+}
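A quick usage sketch of the new parser and loader (the `.env` contents and file names below are invented for illustration; `parseDotenv` and `loadDotenvFiles` are re-exported from the package root, as the `src/index.js` diff further down shows). One quirk worth knowing: the unescape step tests `value.startsWith('"')` only after the surrounding quotes have already been stripped, so `\n`-style escapes in double-quoted values are left intact in practice.

```js
import { parseDotenv, loadDotenvFiles } from "@ant.sh/colony";

const parsed = parseDotenv(
  'DB_HOST="db.internal"\n' +           // surrounding double quotes are stripped
  "DB_PORT=5432 # inline comment\n" +   // trailing " #..." is trimmed off unquoted values
  "# full-line comments are skipped\n" +
  "GREETING='hello world'\n"
);
console.log(parsed);
// { DB_HOST: 'db.internal', DB_PORT: '5432', GREETING: 'hello world' }

// Later files win on conflicting keys; missing files are skipped
// silently (any error other than ENOENT is re-thrown).
const env = await loadDotenvFiles([".env", ".env.local"]);
```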
package/src/index.d.ts
CHANGED
@@ -60,6 +60,13 @@ export interface LoadColonyOptions {
   vars?: Record<string, string>;
   /** Schema validation hook (supports sync and async) */
   schema?: (cfg: ColonyConfig) => ColonyConfig | Promise<ColonyConfig>;
+  /**
+   * Load environment variables from dotenv file(s).
+   * - `true`: Load from [".env", ".env.local"]
+   * - `string`: Load from single file path
+   * - `string[]`: Load from multiple file paths (later files override)
+   */
+  dotenv?: boolean | string | string[];
   /** Security sandbox options */
   sandbox?: SandboxOptions;
   /** Warn when skipping already-visited includes */
@@ -340,3 +347,22 @@ export class OpenBaoProvider implements SecretProvider {
   fetch(key: string): Promise<string>;
   validate(): Promise<void>;
 }
+
+// ============================================================================
+// Dotenv
+// ============================================================================
+
+/**
+ * Parse dotenv file content and return key-value pairs
+ */
+export function parseDotenv(content: string): Record<string, string>;
+
+/**
+ * Load environment variables from a dotenv file
+ */
+export function loadDotenv(filePath: string): Promise<Record<string, string>>;
+
+/**
+ * Load multiple dotenv files (later files override earlier ones)
+ */
+export function loadDotenvFiles(filePaths: string[]): Promise<Record<string, string>>;
package/src/index.js
CHANGED
@@ -11,6 +11,7 @@ import {
   unregisterSecretProvider,
   clearSecretProviders,
 } from "./secrets.js";
+import { loadDotenvFiles, parseDotenv } from "./dotenv.js";
 
 // Re-export secrets functions
 export { registerSecretProvider, unregisterSecretProvider, clearSecretProviders };
@@ -20,6 +21,9 @@ export { AwsSecretsProvider } from "./providers/aws.js";
 export { VaultProvider } from "./providers/vault.js";
 export { OpenBaoProvider } from "./providers/openbao.js";
 
+// Re-export dotenv functions
+export { parseDotenv, loadDotenv, loadDotenvFiles } from "./dotenv.js";
+
 /**
  * @param {object} opts
  * @param {string} opts.entry
@@ -27,6 +31,7 @@ export { OpenBaoProvider } from "./providers/openbao.js";
  * @param {Record<string,string>=} opts.ctx
  * @param {Record<string,string>=} opts.vars
  * @param {(cfg: any) => any=} opts.schema // optional validation hook (e.g. zod.parse)
+ * @param {string|string[]|boolean=} opts.dotenv // dotenv file path(s), or true for ['.env', '.env.local']
 * @param {object=} opts.sandbox // security options
 * @param {string=} opts.sandbox.basePath // restrict includes to this directory
 * @param {string[]=} opts.sandbox.allowedEnvVars // whitelist of allowed env vars (null = allow all)
@@ -80,6 +85,22 @@ export async function loadColony(opts) {
 
   const vars = { ROOT: process.cwd(), ...(opts.vars ?? {}) };
 
+  // Load dotenv files if configured
+  let env = null;
+  if (opts.dotenv) {
+    let dotenvPaths;
+    if (opts.dotenv === true) {
+      dotenvPaths = [".env", ".env.local"];
+    } else if (typeof opts.dotenv === "string") {
+      dotenvPaths = [opts.dotenv];
+    } else if (Array.isArray(opts.dotenv)) {
+      dotenvPaths = opts.dotenv;
+    }
+    if (dotenvPaths) {
+      env = await loadDotenvFiles(dotenvPaths);
+    }
+  }
+
   // Collect requires from all parsed files
   const requires = parsed.flatMap((p) => p.requires ?? []);
 
@@ -87,7 +108,7 @@ export async function loadColony(opts) {
 
   const allowedEnvVars = sandbox.allowedEnvVars ?? null;
   const allowedVars = sandbox.allowedVars ?? null;
-  let cfg = resolveRules({ rules: allRules, dims, ctx, vars, allowedEnvVars, allowedVars, warnings });
+  let cfg = resolveRules({ rules: allRules, dims, ctx, vars, env, allowedEnvVars, allowedVars, warnings });
 
   // Apply secrets if providers are configured
   const secretsOpts = opts.secrets ?? {};
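End to end, the new option lets a config reference `.env` values through the existing `${ENV:...}` interpolation without mutating `process.env`. A hedged sketch (file contents and names invented; option shapes as declared in `index.d.ts` above):

```js
import { loadColony } from "@ant.sh/colony";

// .env:               DB_HOST=localhost
// .env.local:         DB_HOST=db.internal      <- later file wins
// config/app.colony:  *.database.host = "${ENV:DB_HOST}";

const cfg = await loadColony({
  entry: "config/app.colony",
  dotenv: true,                   // shorthand for [".env", ".env.local"]
  // dotenv: ".env.test",         // or a single file
  // dotenv: [".env", ".env.ci"], // or an explicit list (later files override)
});

console.log(cfg.get("database.host")); // "db.internal"
```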
package/src/resolver.js
CHANGED
@@ -1,7 +1,7 @@
 import { applyInterpolationDeep } from "./strings.js";
 import { getDeep, setDeep, deepMerge, isPlainObject } from "./util.js";
 
-export function resolveRules({ rules, dims, ctx, vars, allowedEnvVars = null, allowedVars = null, warnings = [] }) {
+export function resolveRules({ rules, dims, ctx, vars, env = null, allowedEnvVars = null, allowedVars = null, warnings = [] }) {
   const indexed = [];
   for (const r of rules) {
     const scope = r.keySegments.slice(0, dims.length);
@@ -106,7 +106,7 @@ export function resolveRules({ rules, dims, ctx, vars, allowedEnvVars = null, al
     }
   }
 
-  const finalCfg = applyInterpolationDeep(out, { ctx, vars, allowedEnvVars, allowedVars, warnings });
+  const finalCfg = applyInterpolationDeep(out, { ctx, vars, env, allowedEnvVars, allowedVars, warnings });
 
   Object.defineProperties(finalCfg, {
     // Core methods
package/src/strings.js
CHANGED
@@ -4,20 +4,23 @@ import { isPlainObject } from "./util.js";
 // Provider must start with uppercase letter, followed by uppercase letters, digits, or underscores
 const RX_SECRET_PROVIDER = /^[A-Z][A-Z0-9_]*:/;
 
-export function applyInterpolationDeep(value, { ctx, vars, allowedEnvVars = null, allowedVars = null, warnings = [] }) {
-  if (typeof value === "string") return interpolate(value, { ctx, vars, allowedEnvVars, allowedVars, warnings });
-  if (Array.isArray(value)) return value.map((v) => applyInterpolationDeep(v, { ctx, vars, allowedEnvVars, allowedVars, warnings }));
+export function applyInterpolationDeep(value, { ctx, vars, env = null, allowedEnvVars = null, allowedVars = null, warnings = [] }) {
+  if (typeof value === "string") return interpolate(value, { ctx, vars, env, allowedEnvVars, allowedVars, warnings });
+  if (Array.isArray(value)) return value.map((v) => applyInterpolationDeep(v, { ctx, vars, env, allowedEnvVars, allowedVars, warnings }));
   if (isPlainObject(value)) {
     const out = {};
     for (const [k, v] of Object.entries(value)) {
-      out[k] = applyInterpolationDeep(v, { ctx, vars, allowedEnvVars, allowedVars, warnings });
+      out[k] = applyInterpolationDeep(v, { ctx, vars, env, allowedEnvVars, allowedVars, warnings });
     }
     return out;
   }
   return value;
 }
 
-export function interpolate(s, { ctx, vars, allowedEnvVars = null, allowedVars = null, warnings = [] }) {
+export function interpolate(s, { ctx, vars, env = null, allowedEnvVars = null, allowedVars = null, warnings = [] }) {
+  // Merge env with process.env (env takes precedence)
+  const envSource = env ? { ...process.env, ...env } : process.env;
+
   return s.replace(/\$\{([^}]+)\}/g, (match, exprRaw) => {
     const expr = exprRaw.trim();
 
@@ -32,7 +35,7 @@ export function interpolate(s, { ctx, vars, allowedEnvVars = null, allowedVars =
         });
         return "";
       }
-      return
+      return envSource[k] ?? "";
     }
 
     if (expr.startsWith("VAR:")) {