@toolbaux/guardian 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +366 -0
- package/dist/adapters/csharp-adapter.js +149 -0
- package/dist/adapters/go-adapter.js +96 -0
- package/dist/adapters/index.js +16 -0
- package/dist/adapters/java-adapter.js +122 -0
- package/dist/adapters/python-adapter.js +183 -0
- package/dist/adapters/runner.js +69 -0
- package/dist/adapters/types.js +1 -0
- package/dist/adapters/typescript-adapter.js +179 -0
- package/dist/benchmarking/framework.js +91 -0
- package/dist/cli.js +343 -0
- package/dist/commands/analyze-depth.js +43 -0
- package/dist/commands/api-spec-extractor.js +52 -0
- package/dist/commands/breaking-change-analyzer.js +334 -0
- package/dist/commands/config-compliance.js +219 -0
- package/dist/commands/constraints.js +221 -0
- package/dist/commands/context.js +101 -0
- package/dist/commands/data-flow-tracer.js +291 -0
- package/dist/commands/dependency-impact-analyzer.js +27 -0
- package/dist/commands/diff.js +146 -0
- package/dist/commands/discrepancy.js +71 -0
- package/dist/commands/doc-generate.js +163 -0
- package/dist/commands/doc-html.js +120 -0
- package/dist/commands/drift.js +88 -0
- package/dist/commands/extract.js +16 -0
- package/dist/commands/feature-context.js +116 -0
- package/dist/commands/generate.js +339 -0
- package/dist/commands/guard.js +182 -0
- package/dist/commands/init.js +209 -0
- package/dist/commands/intel.js +20 -0
- package/dist/commands/license-dependency-auditor.js +33 -0
- package/dist/commands/performance-hotspot-profiler.js +42 -0
- package/dist/commands/search.js +314 -0
- package/dist/commands/security-boundary-auditor.js +359 -0
- package/dist/commands/simulate.js +294 -0
- package/dist/commands/summary.js +27 -0
- package/dist/commands/test-coverage-mapper.js +264 -0
- package/dist/commands/verify-drift.js +62 -0
- package/dist/config.js +441 -0
- package/dist/extract/ai-context-hints.js +107 -0
- package/dist/extract/analyzers/backend.js +1704 -0
- package/dist/extract/analyzers/depth.js +264 -0
- package/dist/extract/analyzers/frontend.js +2221 -0
- package/dist/extract/api-usage-tracker.js +19 -0
- package/dist/extract/cache.js +53 -0
- package/dist/extract/codebase-intel.js +190 -0
- package/dist/extract/compress.js +452 -0
- package/dist/extract/context-block.js +356 -0
- package/dist/extract/contracts.js +183 -0
- package/dist/extract/discrepancies.js +233 -0
- package/dist/extract/docs-loader.js +110 -0
- package/dist/extract/docs.js +2379 -0
- package/dist/extract/drift.js +1578 -0
- package/dist/extract/duplicates.js +435 -0
- package/dist/extract/feature-arcs.js +138 -0
- package/dist/extract/graph.js +76 -0
- package/dist/extract/html-doc.js +1409 -0
- package/dist/extract/ignore.js +45 -0
- package/dist/extract/index.js +455 -0
- package/dist/extract/llm-client.js +159 -0
- package/dist/extract/pattern-registry.js +141 -0
- package/dist/extract/product-doc.js +497 -0
- package/dist/extract/python.js +1202 -0
- package/dist/extract/runtime.js +193 -0
- package/dist/extract/schema-evolution-validator.js +35 -0
- package/dist/extract/test-gap-analyzer.js +20 -0
- package/dist/extract/tests.js +74 -0
- package/dist/extract/types.js +1 -0
- package/dist/extract/validate-backend.js +30 -0
- package/dist/extract/writer.js +11 -0
- package/dist/output-layout.js +37 -0
- package/dist/project-discovery.js +309 -0
- package/dist/schema/architecture.js +350 -0
- package/dist/schema/feature-spec.js +89 -0
- package/dist/schema/index.js +8 -0
- package/dist/schema/ux.js +46 -0
- package/package.json +75 -0
|
@@ -0,0 +1,233 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Discrepancy Detection — diffs current codebase intelligence against a committed baseline.
|
|
3
|
+
*
|
|
4
|
+
* Answers: "what changed in the code that isn't reflected in our product docs or feature specs?"
|
|
5
|
+
*
|
|
6
|
+
* Checks:
|
|
7
|
+
* 1. untracked_endpoints — endpoints in code but not in any feature spec
|
|
8
|
+
* 2. new_endpoints — endpoints added since the baseline
|
|
9
|
+
* 3. removed_endpoints — endpoints present in baseline but gone from code
|
|
10
|
+
* 4. drifted_models — ORM models whose field count changed since baseline
|
|
11
|
+
* 5. new_models — models added since baseline
|
|
12
|
+
* 6. removed_models — models removed since baseline
|
|
13
|
+
* 7. new_tasks — background tasks added since baseline
|
|
14
|
+
* 8. orphan_specs — feature specs referencing endpoints that no longer exist
|
|
15
|
+
*/
|
|
16
|
+
import fs from "node:fs/promises";
|
|
17
|
+
import path from "node:path";
|
|
18
|
+
import yaml from "js-yaml";
|
|
19
|
+
import { parseFeatureSpec } from "../schema/feature-spec.js";
|
|
20
|
+
/**
 * Snapshot the current codebase intelligence into a compact baseline
 * suitable for committing and diffing against on later runs.
 *
 * @param {object} intel - codebase intelligence (meta, api_registry, model_registry, background_tasks)
 * @returns {object} baseline with sorted endpoint/task lists and per-model field counts
 */
export function buildBaseline(intel) {
  const endpoints = Object.keys(intel.api_registry).sort();
  // Record only the field count per model — enough to detect drift cheaply.
  const models = {};
  for (const [name, model] of Object.entries(intel.model_registry)) {
    models[name] = model.fields.length;
  }
  const tasks = intel.background_tasks.map((task) => task.name).sort();
  return {
    generated_at: intel.meta.generated_at,
    endpoints,
    models,
    tasks,
  };
}
|
|
28
|
+
/**
 * Diff current codebase intelligence against a committed baseline and the
 * feature specs on disk, producing a structured discrepancy report.
 *
 * @param {object} params
 * @param {object} params.intel - current intelligence (api_registry, model_registry, background_tasks, meta)
 * @param {string} [params.baselinePath] - JSON baseline from buildBaseline; missing/unreadable means first run
 * @param {string} [params.featureSpecsDir] - directory of YAML feature specs; omitted skips coverage checks
 * @returns {Promise<object>} report with a summary plus one array per check
 */
export async function buildDiscrepancyReport(params) {
  const { intel, baselinePath, featureSpecsDir } = params;
  const currentEndpoints = new Set(Object.keys(intel.api_registry));
  const currentModels = new Map(Object.entries(intel.model_registry).map(([name, m]) => [name, m.fields.length]));
  const currentTasks = new Set(intel.background_tasks.map((t) => t.name));
  // Load baseline; an absent or unreadable file is treated as a first run.
  let baseline = null;
  if (baselinePath) {
    try {
      const raw = await fs.readFile(baselinePath, "utf8");
      baseline = JSON.parse(raw);
    }
    catch {
      // No baseline yet — first run
    }
  }
  const baselineEndpoints = new Set(baseline?.endpoints ?? []);
  const baselineModels = new Map(Object.entries(baseline?.models ?? {}));
  const baselineTasks = new Set(baseline?.tasks ?? []);
  // Load feature specs to find which endpoints are covered by at least one
  // spec, and which specs reference endpoints that no longer exist (orphans).
  const coveredEndpoints = new Set();
  const orphanSpecs = [];
  if (featureSpecsDir) {
    const specs = await loadFeatureSpecs(featureSpecsDir);
    for (const { spec, file } of specs) {
      const missing = [];
      for (const ep of spec.affected_endpoints) {
        coveredEndpoints.add(ep);
        if (!currentEndpoints.has(ep)) {
          missing.push(ep);
        }
      }
      if (missing.length > 0) {
        orphanSpecs.push({ spec_file: file, missing_endpoints: missing });
      }
    }
  }
  // 1. Untracked endpoints (in code, not in any spec)
  const untrackedEndpoints = Array.from(currentEndpoints)
    .filter((ep) => !coveredEndpoints.has(ep))
    .sort();
  // 2. New endpoints since baseline
  const newEndpoints = Array.from(currentEndpoints)
    .filter((ep) => !baselineEndpoints.has(ep))
    .sort();
  // 3. Removed endpoints since baseline
  const removedEndpoints = Array.from(baselineEndpoints)
    .filter((ep) => !currentEndpoints.has(ep))
    .sort();
  // 4. Drifted models (field count changed)
  const driftedModels = [];
  for (const [name, currentCount] of currentModels) {
    const baselineCount = baselineModels.get(name);
    if (baselineCount !== undefined && baselineCount !== currentCount) {
      driftedModels.push({
        name,
        baseline_field_count: baselineCount,
        current_field_count: currentCount,
        delta: currentCount - baselineCount,
      });
    }
  }
  // Biggest drift first, regardless of direction.
  driftedModels.sort((a, b) => Math.abs(b.delta) - Math.abs(a.delta));
  // 5. New models
  const newModels = Array.from(currentModels.keys())
    .filter((name) => !baselineModels.has(name))
    .sort();
  // 6. Removed models
  const removedModels = Array.from(baselineModels.keys())
    .filter((name) => !currentModels.has(name))
    .sort();
  // 7. New tasks
  const newTasks = Array.from(currentTasks)
    .filter((t) => !baselineTasks.has(t))
    .sort();
  // Fix: count untracked endpoints as issues. Previously they were omitted
  // from the total, so a report containing ONLY untracked endpoints had
  // total_issues === 0 and the markdown renderer printed both the untracked
  // section and "No discrepancies found. Code and specs are in sync."
  const totalIssues = untrackedEndpoints.length +
    newEndpoints.length +
    removedEndpoints.length +
    driftedModels.length +
    newModels.length +
    removedModels.length +
    newTasks.length +
    orphanSpecs.length;
  // Removals and orphans indicate something callers may still depend on.
  const hasCritical = removedEndpoints.length > 0 ||
    removedModels.length > 0 ||
    orphanSpecs.length > 0;
  return {
    generated_at: new Date().toISOString(),
    baseline_at: baseline?.generated_at ?? null,
    summary: { total_issues: totalIssues, has_critical: hasCritical },
    untracked_endpoints: untrackedEndpoints,
    new_endpoints: newEndpoints,
    removed_endpoints: removedEndpoints,
    drifted_models: driftedModels,
    new_models: newModels,
    removed_models: removedModels,
    new_tasks: newTasks,
    orphan_specs: orphanSpecs,
  };
}
|
|
127
|
+
/**
 * Load every YAML feature spec from a directory.
 * Returns [] when the directory is missing or unreadable; individual
 * malformed specs are skipped rather than failing the whole load.
 *
 * @param {string} dir - directory containing *.yaml / *.yml feature specs
 * @returns {Promise<Array<{spec: object, file: string}>>}
 */
async function loadFeatureSpecs(dir) {
  let specPaths;
  try {
    const names = await fs.readdir(dir);
    specPaths = names
      .filter((name) => name.endsWith(".yaml") || name.endsWith(".yml"))
      .map((name) => path.join(dir, name));
  }
  catch {
    // Directory absent — treat as "no specs".
    return [];
  }
  const loaded = [];
  for (const specPath of specPaths) {
    try {
      const raw = await fs.readFile(specPath, "utf8");
      const parsed = yaml.load(raw);
      loaded.push({ spec: parseFeatureSpec(parsed), file: path.basename(specPath) });
    }
    catch {
      // Skip malformed specs
    }
  }
  return loaded;
}
|
|
151
|
+
/**
|
|
152
|
+
* Render a discrepancy report as human-readable Markdown.
|
|
153
|
+
*/
|
|
154
|
+
/**
 * Render a discrepancy report as human-readable Markdown.
 *
 * @param {object} report - report produced by buildDiscrepancyReport
 * @returns {string} markdown document
 */
export function renderDiscrepancyMarkdown(report) {
  const out = [];
  const since = report.baseline_at
    ? `since baseline (${report.baseline_at.slice(0, 10)})`
    : "(no baseline — first run)";
  // Shared shape for the simple "heading + bullet list" sections.
  const pushList = (heading, items) => {
    out.push(heading, "");
    for (const item of items) {
      out.push(`- \`${item}\``);
    }
    out.push("");
  };
  out.push(`# Discrepancy Report`);
  out.push(`_Generated: ${report.generated_at.slice(0, 19).replace("T", " ")} UTC_`);
  out.push(`_Baseline: ${report.baseline_at ? report.baseline_at.slice(0, 10) : "none"}_`);
  out.push("");
  out.push(`**${report.summary.total_issues} issue(s) found**${report.summary.has_critical ? " — ⚠ critical changes detected" : ""}`);
  out.push("");
  if (report.new_endpoints.length > 0) {
    pushList(`## New Endpoints ${since} (${report.new_endpoints.length})`, report.new_endpoints);
  }
  if (report.removed_endpoints.length > 0) {
    pushList(`## ⚠ Removed Endpoints ${since} (${report.removed_endpoints.length})`, report.removed_endpoints);
  }
  if (report.untracked_endpoints.length > 0) {
    // Capped at 20 entries to keep the report readable.
    out.push(`## Untracked Endpoints — not in any feature spec (${report.untracked_endpoints.length})`, "");
    for (const ep of report.untracked_endpoints.slice(0, 20)) {
      out.push(`- \`${ep}\``);
    }
    if (report.untracked_endpoints.length > 20) {
      out.push(`- _...and ${report.untracked_endpoints.length - 20} more_`);
    }
    out.push("");
  }
  if (report.drifted_models.length > 0) {
    out.push(`## Drifted Models — field count changed (${report.drifted_models.length})`, "");
    out.push("| Model | Baseline fields | Current fields | Delta |");
    out.push("|---|---|---|---|");
    for (const model of report.drifted_models) {
      const sign = model.delta > 0 ? "+" : "";
      out.push(`| ${model.name} | ${model.baseline_field_count} | ${model.current_field_count} | ${sign}${model.delta} |`);
    }
    out.push("");
  }
  if (report.new_models.length > 0) {
    pushList(`## New Models ${since} (${report.new_models.length})`, report.new_models);
  }
  if (report.removed_models.length > 0) {
    pushList(`## ⚠ Removed Models ${since} (${report.removed_models.length})`, report.removed_models);
  }
  if (report.new_tasks.length > 0) {
    pushList(`## New Background Tasks ${since} (${report.new_tasks.length})`, report.new_tasks);
  }
  if (report.orphan_specs.length > 0) {
    out.push(`## ⚠ Orphan Feature Specs — reference endpoints that no longer exist (${report.orphan_specs.length})`, "");
    for (const orphan of report.orphan_specs) {
      out.push(`**${orphan.spec_file}**: ${orphan.missing_endpoints.map((e) => `\`${e}\``).join(", ")}`);
    }
    out.push("");
  }
  if (report.summary.total_issues === 0) {
    out.push("No discrepancies found. Code and specs are in sync.");
  }
  return out.join("\n");
}
|
|
@@ -0,0 +1,110 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Docs Loader — loads and parses sections from existing specguard doc files.
|
|
3
|
+
*
|
|
4
|
+
* Reads from specs-out/machine/docs/:
|
|
5
|
+
* hld.md → systemDiagram, couplingHeatmap, backendSubsystems, apiDomainMap
|
|
6
|
+
* summary.md → qualitySignals, systemScale
|
|
7
|
+
* stakeholder.md → stakeholderMetrics
|
|
8
|
+
* integration.md → integrationByDomain (full content, already domain-grouped)
|
|
9
|
+
*/
|
|
10
|
+
import fs from "node:fs/promises";
|
|
11
|
+
import path from "node:path";
|
|
12
|
+
/**
|
|
13
|
+
* Load and parse existing docs from the machine docs directory.
|
|
14
|
+
* All fields are optional — missing files are silently skipped.
|
|
15
|
+
*/
|
|
16
|
+
/**
 * Load and parse existing docs from the machine docs directory.
 * All fields are optional — missing files are silently skipped.
 *
 * @param {string} machineDocsDir - directory holding hld.md, summary.md, stakeholder.md, integration.md
 * @returns {Promise<object>} partial doc sections keyed by section name
 */
export async function loadExistingDocs(machineDocsDir) {
  const read = (name) => readIfExists(path.join(machineDocsDir, name));
  // Fetch all four source docs in parallel; each resolves to null when absent.
  const [hld, summary, stakeholder, integration] = await Promise.all([
    read("hld.md"),
    read("summary.md"),
    read("stakeholder.md"),
    read("integration.md"),
  ]);
  const fromHld = (title) => (hld ? extractSection(hld, title) : undefined);
  const fromSummary = (title) => (summary ? extractSection(summary, title) : undefined);
  return {
    systemDiagram: fromHld("System Block Diagram"),
    couplingHeatmap: fromHld("Structural Coupling Heatmap (Top Functions)"),
    driftSummary: fromHld("Drift Summary"),
    backendSubsystems: fromHld("Backend Subsystems"),
    apiDomainMap: fromHld("API Domain Map"),
    qualitySignals: fromSummary("Quality & Drift Signals"),
    systemScale: fromSummary("System Scale (Current State)"),
    stakeholderMetrics: stakeholder ? extractStakeholderMetrics(stakeholder) : undefined,
    integrationByDomain: integration ?? undefined,
  };
}
|
|
35
|
+
/**
|
|
36
|
+
* Extract a named `## Heading` section from markdown.
|
|
37
|
+
* Returns content from just after the heading until the next `## ` heading (or EOF).
|
|
38
|
+
* Returns undefined if the heading is not found.
|
|
39
|
+
*/
|
|
40
|
+
/**
 * Extract a named `## Heading` section from markdown.
 * Returns content from just after the heading until the next `## ` heading (or EOF).
 * Returns undefined if the heading is not found.
 *
 * Fix: the heading match is now anchored to a whole line. The previous plain
 * `indexOf("## " + heading)` could match inside a deeper heading
 * (`### Heading`) or inside a longer heading that merely starts with the
 * requested text, silently returning content from the wrong section.
 *
 * @param {string} markdown - full markdown document
 * @param {string} heading - heading text without the leading "## "
 * @returns {string|undefined}
 */
export function extractSection(markdown, heading) {
  // Escape regex metacharacters — real headings contain parens,
  // e.g. "System Scale (Current State)".
  const escaped = heading.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
  const headingRe = new RegExp(`^## ${escaped}[ \\t]*$`, "m");
  const match = headingRe.exec(markdown);
  if (!match) {
    return undefined;
  }
  const contentStart = match.index + match[0].length;
  const nextHeading = markdown.indexOf("\n## ", contentStart);
  const raw = nextHeading === -1
    ? markdown.slice(contentStart)
    : markdown.slice(contentStart, nextHeading);
  // Empty sections are reported as absent, matching the not-found case.
  return raw.trim() || undefined;
}
|
|
52
|
+
/**
|
|
53
|
+
* Extract the metrics table and health snapshot from stakeholder.md.
|
|
54
|
+
* Returns from the first `|` table up through the health snapshot section.
|
|
55
|
+
*/
|
|
56
|
+
/**
 * Extract the metrics table and health snapshot from stakeholder.md.
 * Returns from the first `|` table up through the health snapshot section.
 *
 * @param {string} stakeholder - raw stakeholder.md contents
 * @returns {string|undefined} extracted slice, or undefined when no table exists
 */
function extractStakeholderMetrics(stakeholder) {
  // The metrics table is the first line that begins with "| ".
  const tableStart = stakeholder.indexOf("\n| ");
  if (tableStart === -1) {
    return undefined;
  }
  const healthIdx = stakeholder.indexOf("## Health Snapshot");
  if (healthIdx !== -1) {
    // Include everything through the end of the health snapshot section
    // (up to the next "## " heading, or EOF).
    const afterHealth = stakeholder.indexOf("\n## ", healthIdx + 1);
    const sliceEnd = afterHealth === -1 ? stakeholder.length : afterHealth;
    return stakeholder.slice(tableStart, sliceEnd).trim();
  }
  // No health snapshot — return just the metrics table.
  const tableEnd = stakeholder.indexOf("\n## ", tableStart + 1);
  if (tableEnd === -1) {
    return stakeholder.slice(tableStart).trim();
  }
  return stakeholder.slice(tableStart, tableEnd).trim();
}
|
|
74
|
+
/**
|
|
75
|
+
* Parse integration.md into a map of domain → markdown table string.
|
|
76
|
+
* Domains are `## /domain-prefix` headings.
|
|
77
|
+
*/
|
|
78
|
+
/**
 * Parse integration.md into a map of domain → { heading, content }.
 * Domains are `## /domain-prefix` headings; sections with no body are dropped.
 *
 * @param {string} integration - raw integration.md contents
 * @returns {Map<string, {heading: string, content: string}>}
 */
export function parseIntegrationDomains(integration) {
  const domains = new Map();
  // Each "## " heading opens a new section; split keeps the heading line.
  for (const section of integration.split(/\n(?=## )/)) {
    const headingMatch = section.match(/^## (.+)/);
    if (!headingMatch) {
      continue;
    }
    const heading = headingMatch[1].trim();
    const body = section.slice(headingMatch[0].length).trim();
    if (!body) {
      continue;
    }
    // Normalise: "/api/auth" → "auth", "/" → "root", "/api/{project_id}" → "{project_id}"
    domains.set(normaliseIntegrationHeading(heading), { heading, content: body });
  }
  return domains;
}
|
|
96
|
+
/**
 * Normalise an integration heading into a bare domain name.
 * "/api/auth" → "auth"; "/api/{project_id}" → "{project_id}"; "/" → "root".
 *
 * @param {string} heading - raw heading text, typically a path prefix
 * @returns {string} domain name, "root" when nothing remains
 */
function normaliseIntegrationHeading(heading) {
  let domain = heading;
  // Strip a leading "/api/", then any remaining leading slash
  // (the two strips chain, mirroring the original double replace).
  if (domain.startsWith("/api/")) {
    domain = domain.slice("/api/".length);
  }
  if (domain.startsWith("/")) {
    domain = domain.slice(1);
  }
  return domain.length > 0 ? domain : "root";
}
|
|
103
|
+
/**
 * Read a file as UTF-8, returning null when it cannot be read
 * (missing file, permission error, etc.).
 *
 * @param {string} filePath - absolute or relative path to read
 * @returns {Promise<string|null>}
 */
async function readIfExists(filePath) {
  let content = null;
  try {
    content = await fs.readFile(filePath, "utf8");
  }
  catch {
    // Absent/unreadable files are expected — caller treats null as "missing".
  }
  return content;
}
|