perfshield 0.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/LICENSE ADDED
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2025 Vincent Riemer
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
package/README.md ADDED
@@ -0,0 +1,126 @@
1
+ # perfshield
2
+
3
+ perfshield is a build-system-agnostic benchmarking tool for comparing two
4
+ versions of a JavaScript library across JS runtimes. It borrows statistical
5
+ ideas from Tachometer (confidence intervals, adaptive sampling) and focuses on
6
+ runtime JS engines (Node/V8 today).
7
+
8
+ ## What it does
9
+
10
+ - Builds your benchmark bundle using your own build command.
11
+ - Saves a baseline bundle (`prepare`).
12
+ - Builds the current bundle and compares it to the baseline (`compare`).
13
+ - Reports results in console and/or JSON.
14
+ - Exits with code 1 when a regression is detected (the confidence interval excludes 0 in the slower direction).
15
+
16
+ ## Requirements
17
+
18
+ - Node.js (used for the CLI and the Node engine).
19
+ - A build command that emits a single ESM benchmark bundle.
20
+
21
+ ## Install
22
+
23
+ ```
24
+ pnpm add -D perfshield
25
+ ```
26
+
27
+ ## Quick start
28
+
29
+ 1. Create a benchmark module that exports `benchmarks`:
30
+
31
+ ```js
32
+ export const benchmarks = [
33
+ {
34
+ name: "sum",
35
+ iterations: 1,
36
+ async fn() {
37
+ let total = 0;
38
+ for (let i = 0; i < 10_000; i += 1) {
39
+ total += i;
40
+ }
41
+ return total;
42
+ },
43
+ },
44
+ ];
45
+ ```
46
+
47
+ 2. Create a `perfshield.config.json`:
48
+
49
+ ```json
50
+ {
51
+ "artifactsDir": ".perfshield",
52
+ "build": {
53
+ "command": "pnpm run build:bench",
54
+ "output": "dist/bench.js"
55
+ },
56
+ "engines": [
57
+ {
58
+ "name": "node",
59
+ "command": "node"
60
+ }
61
+ ],
62
+ "sampling": {
63
+ "minSamples": 30,
64
+ "timeoutMs": 10000,
65
+ "conditions": {
66
+ "absolute": [0],
67
+ "relative": [0]
68
+ }
69
+ },
70
+ "report": {
71
+ "formats": ["console", "json"]
72
+ }
73
+ }
74
+ ```
75
+
76
+ 3. Run the workflow:
77
+
78
+ ```
79
+ perfshield prepare --config perfshield.config.json
80
+ perfshield compare --config perfshield.config.json
81
+ ```
82
+
83
+ ## Benchmark bundle contract
84
+
85
+ The build output must be a single ESM file that exports:
86
+
87
+ ```js
88
+ export const benchmarks = [
89
+ {
90
+ name: "example",
91
+ fn: async () => {},
92
+ setup: async () => {},
93
+ teardown: async () => {},
94
+ unit: "ms",
95
+ iterations: 1,
96
+ metadata: { tag: "optional" },
97
+ },
98
+ ];
99
+ ```
100
+
101
+ Rules:
102
+
103
+ - `benchmarks` is required.
104
+ - `fn` is required.
105
+ - `setup`/`teardown` run once per sample (per benchmark, per version).
106
+ - If `setup` throws, the sample fails and the run is aborted.
107
+ - `iterations` repeats `fn` inside a single sample to reduce noise.
108
+
109
+ ## Artifacts
110
+
111
+ - `prepare` stores a baseline bundle in `artifactsDir` (default `.perfshield`).
112
+ - `compare` uses the stored baseline and the newly built bundle.
113
+
114
+ ## Reports
115
+
116
+ Supported formats:
117
+
118
+ - `console`: human‑readable summary.
119
+ - `json`: machine‑readable report.
120
+
121
+ If any benchmark shows a regression (the confidence interval excludes 0 in the slower direction),
122
+ the process exits with code 1.
123
+
124
+ ## Examples
125
+
126
+ See `examples/simple` for a minimal setup you can run locally.
@@ -0,0 +1,19 @@
1
+ # Simple Example
2
+
3
+ This example shows the simplest possible setup: a build script that copies a
4
+ benchmark module into `dist/bench.js`, plus a minimal configuration.
5
+
6
+ ## Run
7
+
8
+ ```
9
+ cd examples/simple
10
+ node build.mjs
11
+ perfshield prepare --config perfshield.config.json
12
+ perfshield compare --config perfshield.config.json
13
+ ```
14
+
15
+ ## Files
16
+
17
+ - `bench-source.js`: benchmark module that exports `benchmarks`.
18
+ - `build.mjs`: builds `dist/bench.js`.
19
+ - `perfshield.config.json`: configuration for the CLI.
@@ -0,0 +1,13 @@
1
export const benchmarks = [
  {
    name: "sum",
    iterations: 1,
    // Sums the integers 0..9_999 — a trivial, deterministic CPU-bound
    // workload used to demonstrate the benchmark bundle contract.
    fn: async () => {
      let acc = 0;
      for (let i = 0; i < 10_000; i += 1) acc += i;
      return acc;
    },
  },
];
@@ -0,0 +1,17 @@
1
/* eslint-disable unicorn/prefer-top-level-await */
import { mkdir, readFile, writeFile } from "node:fs/promises";
import { dirname, resolve } from "node:path";

// Destination of the "built" benchmark bundle.
const outputPath = resolve("dist/bench.js");

/**
 * Minimal "build": copies bench-source.js into dist/bench.js,
 * creating the dist directory first.
 */
const main = async () => {
  await mkdir(dirname(outputPath), { recursive: true });
  const contents = await readFile("bench-source.js", "utf8");
  await writeFile(outputPath, contents, "utf8");
};

main().catch((error) => {
  // eslint-disable-next-line no-console
  console.error(error);
  process.exitCode = 1;
});
@@ -0,0 +1,24 @@
1
+ {
2
+ "artifactsDir": ".perfshield",
3
+ "build": {
4
+ "command": "node build.mjs",
5
+ "output": "dist/bench.js"
6
+ },
7
+ "engines": [
8
+ {
9
+ "name": "node",
10
+ "command": "node"
11
+ }
12
+ ],
13
+ "sampling": {
14
+ "minSamples": 30,
15
+ "timeoutMs": 10000,
16
+ "conditions": {
17
+ "absolute": [0],
18
+ "relative": [0]
19
+ }
20
+ },
21
+ "report": {
22
+ "formats": ["console", "json"]
23
+ }
24
+ }
@@ -0,0 +1,19 @@
1
import { access, copyFile, mkdir } from "node:fs/promises";
import { basename, join, resolve } from "node:path";

/** Absolute path of the artifacts directory named by the config. */
const artifactsRoot = (config) => resolve(config.artifactsDir);

/** Directory inside the artifacts dir where the baseline bundle lives. */
const baselineRoot = (config) => join(artifactsRoot(config), "baseline");

/** Full path of the stored baseline bundle for this configuration. */
export const getBaselinePath = (config) =>
  join(baselineRoot(config), basename(config.build.output));

/**
 * Copies a freshly built bundle into the baseline directory,
 * creating the directory if needed.
 * @returns the path the baseline was written to.
 */
export const saveBaseline = async (config, outputPath) => {
  const directory = baselineRoot(config);
  await mkdir(directory, { recursive: true });
  const target = getBaselinePath(config);
  await copyFile(outputPath, target);
  return target;
};

/**
 * Verifies that a baseline bundle exists on disk (throws if missing,
 * i.e. `prepare` has not been run yet).
 * @returns the baseline path.
 */
export const ensureBaseline = async (config) => {
  const baselinePath = getBaselinePath(config);
  await access(baselinePath);
  return baselinePath;
};
package/lib/build.js ADDED
@@ -0,0 +1,27 @@
1
import { spawn } from "node:child_process";
import { access } from "node:fs/promises";
import { resolve } from "node:path";

/**
 * Runs a shell command in `cwd`, inheriting stdio so build output is
 * visible. Resolves on exit code 0; rejects on spawn failure, non-zero
 * exit, or termination by a signal.
 */
const runCommand = (command, cwd) => new Promise((resolvePromise, reject) => {
  const child = spawn(command, {
    cwd,
    shell: true,
    stdio: "inherit"
  });
  child.on("error", error => {
    reject(error);
  });
  child.on("exit", (code, signal) => {
    if (code === 0) {
      resolvePromise();
    } else if (signal != null) {
      // `code` is null when the child was killed by a signal; report
      // the signal instead of a meaningless "exit code ?".
      reject(new Error(`Build command terminated by signal ${signal}.`));
    } else {
      reject(new Error(`Build command failed with exit code ${code ?? "?"}.`));
    }
  });
});

/**
 * Executes the configured build command and verifies that the expected
 * output bundle exists afterwards.
 * @param build - `{ command, cwd?, output }` build configuration.
 * @returns absolute path to the built bundle.
 * @throws if the command fails or the output file is missing.
 */
export const runBuild = async build => {
  const cwd = resolve(build.cwd ?? process.cwd());
  await runCommand(build.command, cwd);
  const outputPath = resolve(cwd, build.output);
  await access(outputPath);
  return outputPath;
};
package/lib/cli.js ADDED
@@ -0,0 +1,74 @@
1
#!/usr/bin/env node
import { ensureBaseline, saveBaseline } from "./artifacts.js";
import { runBuild } from "./build.js";
import { ConfigError, formatConfigError, loadConfig } from "./config.js";
import { getRegressions } from "./regression.js";
import { renderReports } from "./report/index.js";
import { runEngineComparison } from "./runner.js";

/** Prints CLI usage to stderr. */
const usage = () => {
  console.error("Usage: perfshield <prepare|compare> [--config path]");
};

/** Returns the argument following `flag`, or null when absent. */
const getFlagValue = (args, flag) => {
  const index = args.indexOf(flag);
  return index === -1 ? null : args[index + 1] ?? null;
};

/** `prepare`: build the bundle and store it as the baseline. */
const runPrepare = async (config) => {
  const outputPath = await runBuild(config.build);
  const baselinePath = await saveBaseline(config, outputPath);
  console.log(`Baseline saved to ${baselinePath}`);
};

/**
 * `compare`: build the current bundle, benchmark it against the stored
 * baseline on every configured engine, render reports, and set a
 * non-zero exit code when any regression is detected.
 */
const runCompare = async (config) => {
  const outputPath = await runBuild(config.build);
  const baselinePath = await ensureBaseline(config);
  const results = [];
  // Engines run one at a time so benchmarks do not contend for CPU.
  for (const engine of config.engines) {
    const result = await runEngineComparison({
      baselinePath,
      config,
      currentPath: outputPath,
      engine,
    });
    results.push(result);
  }
  for (const output of renderReports(results, config.report.formats)) {
    console.log(output);
  }
  const regressions = getRegressions(results);
  if (regressions.length > 0) {
    console.error(`Regression detected: ${regressions.length} benchmark(s) are slower.`);
    process.exitCode = 1;
  }
};

/** Entry point: dispatches on the subcommand, loading config first. */
const main = async () => {
  const args = process.argv.slice(2);
  const command = args[0];
  if (command !== "prepare" && command !== "compare") {
    usage();
    process.exitCode = 1;
    return;
  }
  const configPath = getFlagValue(args, "--config");
  let config;
  try {
    config = await loadConfig(configPath ?? undefined);
  } catch (error) {
    if (error instanceof ConfigError) {
      console.error(formatConfigError(error));
      process.exitCode = 1;
      return;
    }
    throw error;
  }
  if (command === "prepare") {
    await runPrepare(config);
  } else {
    await runCompare(config);
  }
};

main().catch((error) => {
  console.error(error);
  process.exitCode = 1;
});
package/lib/config.js ADDED
@@ -0,0 +1,328 @@
1
import { readFile } from "node:fs/promises";
import { resolve } from "node:path";

export const DEFAULT_CONFIG_PATH = "perfshield.config.json";

/**
 * Error thrown for any configuration problem: missing file, invalid
 * JSON, or schema violations. Carries the config path and a list of
 * human-readable issue strings.
 */
export class ConfigError extends Error {
  constructor(configPath, issues) {
    super(`Invalid configuration in ${configPath}`);
    this.name = "ConfigError";
    this.configPath = configPath;
    this.issues = issues;
  }
}

/** Formats a ConfigError as a multi-line message, one issue per line. */
export const formatConfigError = (error) => {
  const lines = [
    `Invalid configuration in ${error.configPath}:`,
    ...error.issues.map((issue) => `- ${issue}`),
  ];
  return lines.join("\n");
};

// --- Primitive validators -------------------------------------------------
// Each one records an issue on failure and returns null; on success it
// returns the (possibly copied) value.

/** Accepts plain objects only; returns a shallow copy. */
const asObject = (value, label, issues) => {
  const isPlainObject = value != null && typeof value === "object" && !Array.isArray(value);
  if (!isPlainObject) {
    issues.push(`${label} must be an object.`);
    return null;
  }
  return { ...value };
};

const asArray = (value, label, issues) => {
  if (Array.isArray(value)) {
    return value;
  }
  issues.push(`${label} must be an array.`);
  return null;
};

/** Accepts strings; rejects blank strings unless `options.allowEmpty`. */
const asString = (value, label, issues, options) => {
  if (typeof value !== "string") {
    issues.push(`${label} must be a string.`);
    return null;
  }
  if (options?.allowEmpty !== true && value.trim().length === 0) {
    issues.push(`${label} must be a non-empty string.`);
    return null;
  }
  return value;
};

/** Accepts finite numbers, optionally integer and/or >= options.min. */
const asNumber = (value, label, issues, options) => {
  if (typeof value !== "number" || Number.isNaN(value)) {
    issues.push(`${label} must be a number.`);
    return null;
  }
  if (!Number.isFinite(value)) {
    issues.push(`${label} must be a finite number.`);
    return null;
  }
  if (options?.integer === true && !Number.isInteger(value)) {
    issues.push(`${label} must be an integer.`);
    return null;
  }
  if (options?.min != null && value < options.min) {
    issues.push(`${label} must be at least ${options.min}.`);
    return null;
  }
  return value;
};

/** Validates an array of non-empty strings; bad entries become "". */
const asStringArray = (value, label, issues, options) => {
  const entries = asArray(value, label, issues);
  if (entries == null) {
    return null;
  }
  if (options?.minLength != null && entries.length < options.minLength) {
    issues.push(`${label} must have at least ${options.minLength} entry.`);
  }
  return entries.map((entry, index) => asString(entry, `${label}[${index}]`, issues) ?? "");
};

/** Validates an array of numbers; bad entries become 0. */
const asNumberArray = (value, label, issues, options) => {
  const entries = asArray(value, label, issues);
  if (entries == null) {
    return null;
  }
  if (options?.minLength != null && entries.length < options.minLength) {
    issues.push(`${label} must have at least ${options.minLength} entry.`);
  }
  return entries.map((entry, index) => asNumber(entry, `${label}[${index}]`, issues) ?? 0);
};

/** Validates a string->string map (empty values allowed for env vars). */
const asEnv = (value, label, issues) => {
  const env = asObject(value, label, issues);
  if (env == null) {
    return null;
  }
  const next = {};
  for (const [key, entry] of Object.entries(env)) {
    const envValue = asString(entry, `${label}.${key}`, issues, { allowEmpty: true });
    if (envValue != null) {
      next[key] = envValue;
    }
  }
  return next;
};

/** Flags any keys that are not in the allowed list (catches typos). */
const validateKeys = (value, allowed, label, issues) => {
  for (const key of Object.keys(value)) {
    if (!allowed.includes(key)) {
      issues.push(`Unknown field: ${label}.${key}.`);
    }
  }
};

// --- Section parsers ------------------------------------------------------

const parseBuildConfig = (value, issues) => {
  const build = asObject(value, "config.build", issues);
  if (build == null) {
    return null;
  }
  validateKeys(build, ["command", "cwd", "output"], "config.build", issues);
  const command = asString(build.command, "config.build.command", issues);
  const output = asString(build.output, "config.build.output", issues);
  // `cwd` is optional; an invalid value records an issue and is dropped.
  let cwd;
  if (build.cwd != null) {
    cwd = asString(build.cwd, "config.build.cwd", issues) ?? undefined;
  }
  return command != null && output != null ? { command, cwd, output } : null;
};

const parseEngineConfig = (value, index, issues) => {
  const label = `config.engines[${index}]`;
  const engine = asObject(value, label, issues);
  if (engine == null) {
    return null;
  }
  validateKeys(engine, ["args", "command", "env", "name"], label, issues);
  const args = engine.args == null
    ? undefined
    : asStringArray(engine.args, `${label}.args`, issues) ?? undefined;
  const env = engine.env == null
    ? undefined
    : asEnv(engine.env, `${label}.env`, issues) ?? undefined;
  const command = asString(engine.command, `${label}.command`, issues);
  const name = asString(engine.name, `${label}.name`, issues);
  return command != null && name != null ? { args, command, env, name } : null;
};

const parseSamplingConditions = (value, issues) => {
  const conditions = asObject(value, "config.sampling.conditions", issues);
  if (conditions == null) {
    return null;
  }
  validateKeys(conditions, ["absolute", "relative"], "config.sampling.conditions", issues);
  const absolute = asNumberArray(conditions.absolute, "config.sampling.conditions.absolute", issues, {
    minLength: 1
  });
  const relative = asNumberArray(conditions.relative, "config.sampling.conditions.relative", issues, {
    minLength: 1
  });
  return absolute != null && relative != null ? { absolute, relative } : null;
};

const parseSamplingConfig = (value, issues) => {
  const sampling = asObject(value, "config.sampling", issues);
  if (sampling == null) {
    return null;
  }
  validateKeys(sampling, ["conditions", "minSamples", "timeoutMs"], "config.sampling", issues);
  const minSamples = asNumber(sampling.minSamples, "config.sampling.minSamples", issues, {
    integer: true,
    min: 1
  });
  const timeoutMs = asNumber(sampling.timeoutMs, "config.sampling.timeoutMs", issues, {
    integer: true,
    min: 1
  });
  const conditions = parseSamplingConditions(sampling.conditions, issues);
  if (minSamples == null || timeoutMs == null || conditions == null) {
    return null;
  }
  return { conditions, minSamples, timeoutMs };
};

const parseReportConfig = (value, issues) => {
  const report = asObject(value, "config.report", issues);
  if (report == null) {
    return null;
  }
  validateKeys(report, ["formats"], "config.report", issues);
  const rawFormats = asStringArray(report.formats, "config.report.formats", issues, {
    minLength: 1
  });
  if (rawFormats == null) {
    return null;
  }
  // Keep only the supported formats, recording an issue for each other value.
  const formats = rawFormats.filter((format) => {
    if (format == null) {
      return false;
    }
    if (format === "console" || format === "json") {
      return true;
    }
    issues.push(`config.report.formats only supports "console" or "json" (received "${format}").`);
    return false;
  });
  return formats.length > 0 ? { formats } : null;
};

/** Parses the full config object, accumulating every issue found. */
const parseConfig = (raw, issues) => {
  const config = asObject(raw, "config", issues);
  if (config == null) {
    return null;
  }
  validateKeys(config, ["artifactsDir", "build", "engines", "report", "sampling"], "config", issues);
  const artifactsDir = asString(config.artifactsDir, "config.artifactsDir", issues);
  const build = parseBuildConfig(config.build, issues);
  const enginesRaw = asArray(config.engines, "config.engines", issues);
  const engines = [];
  if (enginesRaw != null) {
    if (enginesRaw.length === 0) {
      issues.push("config.engines must contain at least one entry.");
    }
    for (const [index, entry] of enginesRaw.entries()) {
      const parsed = parseEngineConfig(entry, index, issues);
      if (parsed != null) {
        engines.push(parsed);
      }
    }
  }
  const report = parseReportConfig(config.report, issues);
  const sampling = parseSamplingConfig(config.sampling, issues);
  if (artifactsDir == null || build == null || report == null || sampling == null || engines.length === 0) {
    return null;
  }
  return { artifactsDir, build, engines, report, sampling };
};

// --- JSON error reporting -------------------------------------------------

/** Converts a character offset into a 1-based line/column pair. */
const positionToLineColumn = (contents, index) => {
  let line = 1;
  let column = 1;
  const end = Math.min(index, contents.length);
  for (let i = 0; i < end; i += 1) {
    if (contents[i] === "\n") {
      line += 1;
      column = 1;
    } else {
      column += 1;
    }
  }
  return { column, line };
};

/**
 * Rewrites a JSON.parse error into a friendlier message, adding a
 * line/column location when the engine reports a "position N" offset.
 */
const describeJsonError = (contents, message) => {
  const match = /position\s+(\d+)/i.exec(message);
  const index = match == null ? Number.NaN : Number(match[1]);
  if (Number.isFinite(index)) {
    const { column, line } = positionToLineColumn(contents, index);
    return `Invalid JSON: ${message} (line ${line}, column ${column}).`;
  }
  return `Invalid JSON: ${message}.`;
};

/** Validates the parsed JSON; throws ConfigError listing every issue. */
const coerceConfig = (raw, configPath) => {
  const issues = [];
  const config = parseConfig(raw, issues);
  if (config == null || issues.length > 0) {
    throw new ConfigError(configPath, issues.length > 0 ? issues : ["Configuration is invalid."]);
  }
  return config;
};

/**
 * Loads and validates a perfshield config file.
 * @param configPath - optional path; defaults to DEFAULT_CONFIG_PATH.
 * @returns the validated config object.
 * @throws {ConfigError} when the file is missing, unreadable, not valid
 *   JSON, or fails schema validation.
 */
export const loadConfig = async (configPath) => {
  const resolvedPath = resolve(configPath ?? DEFAULT_CONFIG_PATH);
  let contents;
  try {
    contents = await readFile(resolvedPath, "utf8");
  } catch (error) {
    if (error.code === "ENOENT") {
      throw new ConfigError(resolvedPath, [`Config file not found. Expected a file at ${resolvedPath}.`]);
    }
    throw new ConfigError(resolvedPath, [`Failed to read config file: ${error.message ?? "Unknown error"}.`]);
  }
  let raw;
  try {
    raw = JSON.parse(contents);
  } catch (error) {
    const issue = describeJsonError(contents, error.message ?? "Unable to parse JSON.");
    throw new ConfigError(resolvedPath, [issue]);
  }
  return coerceConfig(raw, resolvedPath);
};