@chappibunny/repolens 0.4.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +219 -0
- package/README.md +899 -0
- package/RELEASE.md +52 -0
- package/bin/repolens.js +2 -0
- package/package.json +61 -0
- package/src/ai/document-plan.js +133 -0
- package/src/ai/generate-sections.js +271 -0
- package/src/ai/prompts.js +312 -0
- package/src/ai/provider.js +134 -0
- package/src/analyzers/context-builder.js +146 -0
- package/src/analyzers/domain-inference.js +127 -0
- package/src/analyzers/flow-inference.js +198 -0
- package/src/cli.js +271 -0
- package/src/core/config-schema.js +266 -0
- package/src/core/config.js +18 -0
- package/src/core/diff.js +45 -0
- package/src/core/scan.js +312 -0
- package/src/delivery/comment.js +139 -0
- package/src/docs/generate-doc-set.js +123 -0
- package/src/docs/write-doc-set.js +85 -0
- package/src/doctor.js +174 -0
- package/src/init.js +540 -0
- package/src/migrate.js +251 -0
- package/src/publishers/index.js +33 -0
- package/src/publishers/markdown.js +32 -0
- package/src/publishers/notion.js +325 -0
- package/src/publishers/publish.js +31 -0
- package/src/renderers/render.js +256 -0
- package/src/renderers/renderDiff.js +139 -0
- package/src/renderers/renderMap.js +224 -0
- package/src/utils/branch.js +93 -0
- package/src/utils/logger.js +26 -0
- package/src/utils/retry.js +55 -0
- package/src/utils/update-check.js +150 -0
|
@@ -0,0 +1,198 @@
|
|
|
1
|
+
// Infer data flows through the application using heuristics

/**
 * Run every flow heuristic against the scan result and collect the hits.
 *
 * @param {object} scanResult - Repository scan output ({ modules, pages, api }).
 * @param {object} config - RepoLens configuration (currently unused here,
 *   kept for interface stability with callers).
 * @returns {object[]} Flow descriptors for each heuristic that matched.
 */
export function inferDataFlows(scanResult, config) {
  // Each inferrer inspects the scan result and returns either a flow
  // descriptor or null when the repository shows no sign of that flow.
  const inferrers = [
    inferMarketDataFlow,   // Stock/Market Data Flow (if applicable)
    inferAuthFlow,         // Authentication Flow
    inferContentFlow,      // Content/Article Flow
    inferApiIntegrationFlow // API Integration Flow
  ];

  const flows = [];
  for (const infer of inferrers) {
    const flow = infer(scanResult);
    if (flow) flows.push(flow);
  }

  return flows;
}
|
|
24
|
+
|
|
25
|
+
/**
 * Heuristically detect a stock/market data flow in the scanned repo.
 *
 * Looks for "stock"/"market"/"chart" in module keys, "stock"/"market" in
 * page paths, and "price"/"stock"/"market" in API paths.
 *
 * @param {object} scanResult - Scan output ({ modules, pages, api }).
 * @returns {object|null} Flow descriptor, or null when nothing matched.
 */
function inferMarketDataFlow(scanResult) {
  const { modules, pages, api } = scanResult;

  // Match module keys once and reuse the result for both detection and the
  // reported module list (the original duplicated this predicate verbatim).
  const moduleKeywords = ["stock", "market", "chart"];
  const matchedModules = modules.filter((m) => {
    const key = m.key.toLowerCase();
    return moduleKeywords.some((kw) => key.includes(kw));
  });

  // pages/api may be absent from the scan result, hence optional chaining.
  const hasStockPages = pages?.some(
    (p) => p.path.includes("stock") || p.path.includes("market")
  );

  const hasStockApis = api?.some(
    (a) =>
      a.path.includes("price") ||
      a.path.includes("stock") ||
      a.path.includes("market")
  );

  if (matchedModules.length === 0 && !hasStockPages && !hasStockApis) return null;

  return {
    name: "Market Data Flow",
    description: "User requests stock data, which flows through UI components to market data services",
    steps: [
      "User visits stock analysis page",
      "Page component loads stock-specific UI",
      "Components request data from stock libraries",
      "Libraries fetch from market data APIs",
      "Data is transformed and displayed to user"
    ],
    modules: matchedModules.map((m) => m.key),
    critical: true
  };
}
|
|
65
|
+
|
|
66
|
+
/**
 * Heuristically detect an authentication flow in the scanned repo.
 *
 * Looks for "auth"/"login"/"session" in module keys and API paths, and
 * "login"/"signup"/"auth" in page paths.
 *
 * @param {object} scanResult - Scan output ({ modules, pages, api }).
 * @returns {object|null} Flow descriptor, or null when nothing matched.
 */
function inferAuthFlow(scanResult) {
  const { modules, pages, api } = scanResult;

  // Match module keys once and reuse the result for both detection and the
  // reported module list (the original duplicated this predicate verbatim).
  const moduleKeywords = ["auth", "login", "session"];
  const matchedModules = modules.filter((m) => {
    const key = m.key.toLowerCase();
    return moduleKeywords.some((kw) => key.includes(kw));
  });

  // pages/api may be absent from the scan result, hence optional chaining.
  const hasAuthPages = pages?.some(
    (p) =>
      p.path.includes("login") ||
      p.path.includes("signup") ||
      p.path.includes("auth")
  );

  const hasAuthApis = api?.some(
    (a) =>
      a.path.includes("auth") ||
      a.path.includes("login") ||
      a.path.includes("session")
  );

  if (matchedModules.length === 0 && !hasAuthPages && !hasAuthApis) return null;

  return {
    name: "Authentication Flow",
    description: "User authentication and session management",
    steps: [
      "User submits login credentials",
      "Auth API validates credentials",
      "Session is created and stored",
      "User is redirected to authenticated area",
      "Subsequent requests include auth token"
    ],
    modules: matchedModules.map((m) => m.key),
    critical: true
  };
}
|
|
107
|
+
|
|
108
|
+
/**
 * Heuristically detect a content/article delivery flow in the scanned repo.
 *
 * Looks for "article"/"content"/"newsletter"/"blog" in module keys and
 * "article"/"news"/"blog" in page paths. API paths are not consulted.
 *
 * @param {object} scanResult - Scan output ({ modules, pages }).
 * @returns {object|null} Flow descriptor, or null when nothing matched.
 */
function inferContentFlow(scanResult) {
  const { modules, pages } = scanResult;

  // One keyword list drives both detection and the reported module list.
  // Fixes a mismatch where "blog" modules triggered the flow but were
  // omitted from the returned `modules` array.
  const moduleKeywords = ["article", "content", "newsletter", "blog"];
  const matchedModules = modules.filter((m) => {
    const key = m.key.toLowerCase();
    return moduleKeywords.some((kw) => key.includes(kw));
  });

  // pages may be absent from the scan result, hence optional chaining.
  const hasContentPages = pages?.some(
    (p) =>
      p.path.includes("article") ||
      p.path.includes("news") ||
      p.path.includes("blog")
  );

  if (matchedModules.length === 0 && !hasContentPages) return null;

  return {
    name: "Content Delivery Flow",
    description: "Research articles and content are fetched and displayed to users",
    steps: [
      "User browses content sections",
      "Content components load articles",
      "Articles may be fetched from CMS or database",
      "Content is rendered with formatting",
      "User can interact with content features"
    ],
    modules: matchedModules.map((m) => m.key),
    critical: false
  };
}
|
|
144
|
+
|
|
145
|
+
/**
 * Describe a generic API integration flow when the scan found any
 * API endpoints at all.
 *
 * @param {object} scanResult - Scan output; only `api` is consulted.
 * @returns {object|null} Flow descriptor, or null when no endpoints exist.
 */
function inferApiIntegrationFlow(scanResult) {
  // No endpoints (missing, null, or empty array) means no flow.
  if (!scanResult.api?.length) return null;

  const endpointCount = scanResult.api.length;

  return {
    name: "API Integration Flow",
    description: "External service integrations and backend processing",
    steps: [
      "Frontend makes API request",
      "API route receives and validates request",
      "Business logic is executed",
      "External services may be called",
      "Response is formatted and returned"
    ],
    modules: [`${endpointCount} API endpoints detected`],
    critical: false
  };
}
|
|
164
|
+
|
|
165
|
+
/**
 * Summarize what a flow depends on: its own modules, up to five shared
 * library/utility modules, and any inferred external services.
 *
 * Simple heuristic: modules in a flow likely depend on each other and on
 * shared libraries ("lib"/"util" in the module key).
 *
 * @param {object} flow - Flow descriptor produced by inferDataFlows.
 * @param {object} scanResult - Scan output; only `modules` is consulted.
 * @returns {{internalDependencies: string[], sharedLibraries: string[], externalDependencies: string[]}}
 */
export function identifyFlowDependencies(flow, scanResult) {
  // Collect at most the first five library-looking module keys.
  const sharedLibraries = [];
  for (const mod of scanResult.modules) {
    if (sharedLibraries.length === 5) break;
    const key = mod.key.toLowerCase();
    if (key.includes("lib") || key.includes("util")) {
      sharedLibraries.push(mod.key);
    }
  }

  return {
    internalDependencies: flow.modules,
    sharedLibraries,
    externalDependencies: inferExternalDependencies(flow)
  };
}
|
|
181
|
+
|
|
182
|
+
/**
 * Guess external services a flow talks to, based on its name.
 *
 * @param {object} flow - Flow descriptor; only `name` is consulted.
 * @returns {string[]} Human-readable external dependency labels.
 */
function inferExternalDependencies(flow) {
  // Keyword-in-name → external service label, checked in a fixed order.
  const nameToService = [
    [["Market", "Stock"], "Market data provider API"],
    [["Auth"], "Authentication service"],
    [["Content"], "Content management system"]
  ];

  const external = [];
  for (const [keywords, service] of nameToService) {
    if (keywords.some((kw) => flow.name.includes(kw))) {
      external.push(service);
    }
  }

  return external;
}
|
package/src/cli.js
ADDED
|
@@ -0,0 +1,271 @@
|
|
|
1
|
+
import "dotenv/config";
|
|
2
|
+
import fs from "node:fs/promises";
|
|
3
|
+
import path from "node:path";
|
|
4
|
+
import { fileURLToPath } from "node:url";
|
|
5
|
+
import { loadConfig } from "./core/config.js";
|
|
6
|
+
import { scanRepo } from "./core/scan.js";
|
|
7
|
+
import { getGitDiff } from "./core/diff.js";
|
|
8
|
+
import { runDoctor } from "./doctor.js";
|
|
9
|
+
|
|
10
|
+
import {
|
|
11
|
+
renderSystemOverview,
|
|
12
|
+
renderModuleCatalog,
|
|
13
|
+
renderApiSurface,
|
|
14
|
+
renderRouteMap
|
|
15
|
+
} from "./renderers/render.js";
|
|
16
|
+
import { renderSystemMap } from "./renderers/renderMap.js";
|
|
17
|
+
import { renderArchitectureDiff, buildArchitectureDiffData } from "./renderers/renderDiff.js";
|
|
18
|
+
|
|
19
|
+
import { publishDocs } from "./publishers/index.js";
|
|
20
|
+
import { upsertPrComment } from "./delivery/comment.js";
|
|
21
|
+
import { runInit } from "./init.js";
|
|
22
|
+
import { runMigrate } from "./migrate.js";
|
|
23
|
+
import { info, error } from "./utils/logger.js";
|
|
24
|
+
import { checkForUpdates } from "./utils/update-check.js";
|
|
25
|
+
import { generateDocumentSet } from "./docs/generate-doc-set.js";
|
|
26
|
+
import { writeDocumentSet } from "./docs/write-doc-set.js";
|
|
27
|
+
|
|
28
|
+
/**
 * Read this package's version from package.json (one directory above src/).
 *
 * @returns {Promise<string>} The version string, or "0.0.0" if missing.
 */
async function getPackageVersion() {
  const moduleDir = path.dirname(fileURLToPath(import.meta.url));
  const packageJsonPath = path.resolve(moduleDir, "../package.json");

  const pkg = JSON.parse(await fs.readFile(packageJsonPath, "utf8"));

  return pkg.version || "0.0.0";
}
|
|
38
|
+
|
|
39
|
+
/**
 * Print the RepoLens name/version banner followed by a divider line.
 */
async function printBanner() {
  const divider = "─".repeat(40);
  console.log(`\nRepoLens v${await getPackageVersion()}`);
  console.log(divider);
}
|
|
44
|
+
|
|
45
|
+
/**
 * Return the argv token immediately following a flag, if the flag appears.
 *
 * @param {string} name - Flag to look for (e.g. "--config").
 * @returns {string|undefined} The next token, or undefined when the flag
 *   is absent or is the last argument.
 */
function getArg(name) {
  const { argv } = process;
  const flagPosition = argv.indexOf(name);
  if (flagPosition === -1) return undefined;
  return argv[flagPosition + 1];
}
|
|
49
|
+
|
|
50
|
+
/**
 * Auto-discover .repolens.yml starting from current directory.
 * Searches upward through parent directories (including the filesystem
 * root) until found.
 *
 * @param {string} [startDir=process.cwd()] - Directory to start from.
 * @returns {Promise<string>} Absolute path to the discovered config file.
 * @throws {Error} When no .repolens.yml exists anywhere up to the root.
 */
async function findConfig(startDir = process.cwd()) {
  let dir = path.resolve(startDir);
  const root = path.parse(dir).root;

  // Walk upward, checking each directory; the root is the final stop.
  for (;;) {
    const configPath = path.join(dir, ".repolens.yml");
    try {
      await fs.access(configPath);
      return configPath;
    } catch {
      if (dir === root) {
        throw new Error(
          "RepoLens config not found (.repolens.yml)\n" +
          "Run 'repolens init' to create one, or use --config to specify a path."
        );
      }
      dir = path.dirname(dir);
    }
  }
}
|
|
81
|
+
|
|
82
|
+
/**
 * Print CLI usage, commands, options, and examples to stdout.
 */
function printHelp() {
  // NOTE(review): column alignment inside this help text was lost in
  // extraction and is reconstructed here — verify against the published
  // package before relying on exact spacing.
  console.log(`
RepoLens — Repo intelligence CLI

Usage:
  repolens <command> [options]

Commands:
  init      Scaffold RepoLens files in a target repository
  doctor    Validate a repository's RepoLens setup
  migrate   Upgrade workflow files to v0.4.0 format
  publish   Scan, render, and publish RepoLens outputs
  version   Print the current RepoLens version

Options:
  --config    Path to .repolens.yml (auto-discovered if not provided)
  --target    Target repository path for init/doctor/migrate
  --dry-run   Preview migration changes without applying them
  --force     Skip interactive confirmation for migration
  --verbose   Enable verbose logging
  --version   Print version
  --help      Show this help message

Examples:
  repolens init --target /tmp/my-repo
  repolens doctor --target /tmp/my-repo
  repolens migrate                    # Migrate workflows in current directory
  repolens migrate --dry-run          # Preview changes without applying
  repolens publish                    # Auto-discovers .repolens.yml
  repolens publish --config /path/.repolens.yml   # Explicit config path
  repolens --version
`);
}
|
|
115
|
+
|
|
116
|
+
/**
 * CLI entry point: dispatch on argv[2] to the init/doctor/migrate/publish
 * handlers (publish is also the default when no command or only flags are
 * given). Exits the process with code 1 on operational failures and code 2
 * on configuration/validation failures.
 */
async function main() {
  const command = process.argv[2];

  // Check for updates (non-blocking, runs in background)
  checkForUpdates().catch(() => {/* Silently fail */});

  if (command === "--help" || command === "-h" || command === "help") {
    printHelp();
    return;
  }

  if (command === "--version" || command === "-v" || command === "version") {
    const version = await getPackageVersion();
    console.log(version);
    return;
  }

  if (command === "init") {
    await printBanner();
    // Default to the current directory when --target is not supplied.
    const targetDir = getArg("--target") || process.cwd();
    info(`Initializing RepoLens in: ${targetDir}`);
    try {
      await runInit(targetDir);
      info("✓ RepoLens initialized successfully");
    } catch (err) {
      error("Failed to initialize RepoLens:");
      error(err.message);
      process.exit(1);
    }
    return;
  }

  if (command === "doctor") {
    await printBanner();
    const targetDir = getArg("--target") || process.cwd();
    info(`Validating RepoLens setup in: ${targetDir}`);
    try {
      await runDoctor(targetDir);
      info("✓ RepoLens validation passed");
    } catch (err) {
      error("Validation failed:");
      error(err.message);
      // Exit code 2 distinguishes validation failures from runtime errors.
      process.exit(2);
    }
    return;
  }

  if (command === "migrate") {
    const targetDir = getArg("--target") || process.cwd();
    const dryRun = process.argv.includes("--dry-run");
    const force = process.argv.includes("--force");

    try {
      await runMigrate(targetDir, { dryRun, force });
    } catch (err) {
      error("Migration failed:");
      error(err.message);
      process.exit(1);
    }
    return;
  }

  // "publish" is also the default command: reached for a bare invocation
  // or when only flags (e.g. --config) are passed.
  if (command === "publish" || !command || command.startsWith("--")) {
    await printBanner();

    // Auto-discover config if not provided
    let configPath;
    try {
      configPath = getArg("--config") || await findConfig();
      info(`Using config: ${configPath}`);
    } catch (err) {
      error(err.message);
      process.exit(2);
    }

    let cfg, scan;
    try {
      info("Loading configuration...");
      cfg = await loadConfig(configPath);
    } catch (err) {
      error("Failed to load configuration:");
      error(err.message);
      process.exit(2);
    }

    try {
      info("Scanning repository...");
      scan = await scanRepo(cfg);
      info(`Detected ${scan.modules?.length || 0} modules`);
    } catch (err) {
      error("Failed to scan repository:");
      error(err.message);
      process.exit(1);
    }

    // NOTE(review): diffing is hard-coded against origin/main and is not
    // wrapped in try/catch — a git failure here falls through to the
    // top-level handler. Confirm that is intended.
    const rawDiff = getGitDiff("origin/main");
    const diffData = buildArchitectureDiffData(rawDiff);

    try {
      info("Generating documentation set...");
      const docSet = await generateDocumentSet(scan, cfg, rawDiff);

      info("Writing documentation to disk...");
      const writeResult = await writeDocumentSet(docSet, process.cwd());
      info(`✓ Generated ${writeResult.documentCount} documents in ${writeResult.outputDir}`);

      // Build legacy renderedPages format for Notion publishing
      const renderedPages = {};
      for (const doc of docSet.documents) {
        // Map new document keys to legacy keys for backwards compatibility
        // (currently an identity mapping).
        const legacyKey = doc.key;
        renderedPages[legacyKey] = doc.content;
      }

      info("Publishing documentation...");
      await publishDocs(cfg, renderedPages);
      await upsertPrComment(diffData);
      info("✓ Documentation published successfully");
    } catch (err) {
      error("Failed to publish documentation:");
      error(err.message);
      process.exit(1);
    }

    return;
  }

  error(`Unknown command: ${command}`);
  error("Available commands: init, doctor, migrate, publish, version, help");
  process.exit(1);
}
|
|
247
|
+
|
|
248
|
+
// Top-level error handler: any rejection escaping main() lands here.
// Prints a friendly message keyed off common fs error codes, shows the
// stack only in verbose mode, and always exits with code 1.
main().catch((err) => {
  console.error("\n❌ RepoLens encountered an unexpected error:\n");

  if (err.code === "ENOENT") {
    error(`File not found: ${err.path}`);
    error("Check that all required files exist and paths are correct.");
  } else if (err.code === "EACCES") {
    error(`Permission denied: ${err.path}`);
    error("Check file permissions and try again.");
  } else if (err.message) {
    error(err.message);
  } else {
    // Non-Error throw (string, object without message): log it raw.
    error(err);
  }

  // Stack traces are opt-in via env var or flag to keep default output clean.
  if (process.env.VERBOSE || process.argv.includes("--verbose")) {
    console.error("\nStack trace:");
    console.error(err.stack);
  } else {
    console.error("\nRun with --verbose for full error details.");
  }

  process.exit(1);
});
|