@bryceli/openclaw 0.1.0
- package/README.md +45 -0
- package/package.json +25 -0
- package/scripts/run-openclaw.mjs +29 -0
- package/scripts/setup-openclaw.mjs +393 -0
package/README.md
ADDED
@@ -0,0 +1,45 @@
# OpenClaw

This project installs `openclaw` locally with npm and keeps all OpenClaw state inside this project.

## What is isolated

- OpenClaw is installed in the local `node_modules`
- Config is stored in `.openclaw-home/openclaw.json`
- Secrets are stored in `.openclaw-home/.env`
- The workspace is stored in `.openclaw-home/workspace`

## Package name

This wrapper package is intended to be published as `@bryceli/openclaw`.

## Quick start

```bash
npm install
npm run setup
npm run gateway
```

## Useful commands

```bash
npm run openclaw -- --version
npm run dashboard
npm run models:list
```

## Interface types

- OpenAI Compatible (most common)
- Anthropic Native
- OpenAI Responses (new-style API)
- Google Gemini (native API)
- Ollama Native (local)

## Notes

- This project is only for OpenClaw.
- It does not configure Claude Code.
- It does not configure OpenCode.
- All paths are project-local through `OPENCLAW_HOME`.
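For reference, a minimal sanity check (a hypothetical helper, not shipped in this package) that confirms the isolated layout described above exists after `npm run setup`:

```js
// check-isolation.mjs - illustrative only; assumes it runs from the project root.
import { existsSync } from "node:fs";
import path from "node:path";

const home = path.resolve(".openclaw-home");
for (const entry of ["openclaw.json", ".env", "workspace"]) {
  const target = path.join(home, entry);
  console.log(`${existsSync(target) ? "ok     " : "missing"} ${target}`);
}
```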
package/package.json
ADDED
@@ -0,0 +1,25 @@
{
  "name": "@bryceli/openclaw",
  "version": "0.1.0",
  "description": "Project-local OpenClaw installer and model configurator.",
  "type": "module",
  "publishConfig": {
    "access": "public"
  },
  "scripts": {
    "setup": "node scripts/setup-openclaw.mjs",
    "openclaw": "node scripts/run-openclaw.mjs",
    "gateway": "node scripts/run-openclaw.mjs gateway run",
    "dashboard": "node scripts/run-openclaw.mjs dashboard",
    "models:list": "node scripts/run-openclaw.mjs models list",
    "models:test": "node scripts/run-openclaw.mjs models test"
  },
  "engines": {
    "node": ">=22"
  },
  "dependencies": {
    "@clack/prompts": "^0.10.1",
    "openclaw": "latest",
    "picocolors": "^1.1.1"
  }
}
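Every entry in `scripts` funnels through `scripts/run-openclaw.mjs` (shown next), so anything after `--` reaches the `openclaw` CLI unchanged: `npm run openclaw -- models list`, for example, ends up running `npm exec -- openclaw models list` with the project-local `OPENCLAW_HOME` applied.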
package/scripts/run-openclaw.mjs
ADDED
@@ -0,0 +1,29 @@
import { spawn } from "node:child_process";
import path from "node:path";
import { fileURLToPath } from "node:url";

const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
const projectRoot = path.resolve(__dirname, "..");
const openclawHome = path.join(projectRoot, ".openclaw-home");

const npmCommand = process.platform === "win32" ? "npm.cmd" : "npm";
const args = ["exec", "--", "openclaw", ...process.argv.slice(2)];

const child = spawn(npmCommand, args, {
  cwd: projectRoot,
  env: {
    ...process.env,
    OPENCLAW_HOME: openclawHome
  },
  // Recent Node releases refuse to spawn .cmd files without a shell,
  // so opt in on Windows.
  shell: process.platform === "win32",
  stdio: "inherit"
});

child.on("exit", (code, signal) => {
  if (signal) {
    process.kill(process.pid, signal);
    return;
  }

  process.exit(code ?? 0);
});
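The wrapper's only job is to re-run `openclaw` with a project-local `OPENCLAW_HOME`. A minimal sketch of the same idea (illustrative, not part of the package), showing that a spawned child sees the overridden variable:

```js
// env-demo.mjs - spawns a child Node process and prints the OPENCLAW_HOME it sees.
import { execFileSync } from "node:child_process";
import path from "node:path";

const home = path.resolve(".openclaw-home");
const seen = execFileSync(process.execPath, ["-p", "process.env.OPENCLAW_HOME"], {
  env: { ...process.env, OPENCLAW_HOME: home },
  encoding: "utf8"
});
console.log(seen.trim()); // prints <project>/.openclaw-home
```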
package/scripts/setup-openclaw.mjs
ADDED
@@ -0,0 +1,393 @@
import * as p from "@clack/prompts";
import pc from "picocolors";
import { mkdirSync, writeFileSync } from "node:fs";
import path from "node:path";
import { fileURLToPath } from "node:url";

const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
const projectRoot = path.resolve(__dirname, "..");
const openclawHome = path.join(projectRoot, ".openclaw-home");
const workspaceDir = path.join(openclawHome, "workspace");
const configPath = path.join(openclawHome, "openclaw.json");
const envPath = path.join(openclawHome, ".env");

const INTERFACE_TYPES = [
  {
    value: "openai-compatible",
    label: "OpenAI Compatible (most common)",
    hint: "For proxy gateways and generic OpenAI-style APIs",
    defaultBaseUrl: "https://api.openai.com/v1"
  },
  {
    value: "anthropic-native",
    label: "Anthropic Native",
    hint: "For Anthropic-compatible Messages endpoints",
    defaultBaseUrl: "https://api.anthropic.com"
  },
  {
    value: "openai-responses",
    label: "OpenAI Responses (new-style API)",
    hint: "For OpenAI Responses-style providers",
    defaultBaseUrl: "https://api.openai.com/v1"
  },
  {
    value: "google-gemini",
    label: "Google Gemini (native API)",
    hint: "For Gemini-native APIs",
    defaultBaseUrl: "https://generativelanguage.googleapis.com"
  },
  {
    value: "ollama-native",
    label: "Ollama Native (local)",
    hint: "For the Ollama native API without /v1",
    defaultBaseUrl: "http://127.0.0.1:11434"
  }
];

await main();

async function main() {
  console.clear();
  p.intro(pc.bgBlue(pc.white(" OpenClaw Local Setup ")));

  const interfaceType = await p.select({
    message: "Select the API interface type",
    options: INTERFACE_TYPES.map((item) => ({
      value: item.value,
      label: item.label,
      hint: item.hint
    })),
    initialValue: "openai-compatible"
  });

  if (p.isCancel(interfaceType)) {
    exitCancelled();
  }

  const interfaceConfig = INTERFACE_TYPES.find((item) => item.value === interfaceType);
  if (!interfaceConfig) {
    throw new Error(`Unsupported interface type: ${interfaceType}`);
  }

  const suggestedBaseUrl = await getSuggestedBaseUrl(interfaceConfig);
  const baseUrl = await promptRequiredText({
    message: "Enter the API base URL",
    placeholder: suggestedBaseUrl,
    initialValue: suggestedBaseUrl
  });

  const apiKeyDefault = interfaceType === "ollama-native" ? "ollama-local" : "";
  const apiKey = await promptOptionalText({
    message: "Enter the API key if needed (leave blank to skip)",
    placeholder: apiKeyDefault || "sk-...",
    initialValue: apiKeyDefault
  });

  const detectedPrimary = await tryDetectModelId(baseUrl, interfaceType);
  const primaryModelId = await promptRequiredText({
    message: "Enter the primary model ID",
    placeholder: detectedPrimary || getSuggestedPrimaryModelId(interfaceType),
    initialValue: detectedPrimary || getSuggestedPrimaryModelId(interfaceType)
  });

  const fallbackModelId = await promptOptionalText({
    message: "Enter the fallback model ID if you want one (leave blank to skip)",
    placeholder: primaryModelId,
    initialValue: primaryModelId
  });

  // Keep secrets out of openclaw.json: envEntries only ever lands in .env.
  const { envEntries, primaryModelRef, fallbackModelRef, ...config } = buildOpenClawConfig({
    interfaceType,
    baseUrl,
    apiKey: apiKey || apiKeyDefault,
    primaryModelId,
    fallbackModelId: fallbackModelId || primaryModelId
  });

  mkdirSync(workspaceDir, { recursive: true });
  writeFileSync(configPath, `${JSON.stringify(config, null, 2)}\n`, "utf8");
  writeFileSync(envPath, buildEnvFile(envEntries), "utf8");

  const notes = [];
  if (interfaceType === "openai-responses" && !isOfficialOpenAiBaseUrl(baseUrl)) {
    notes.push("Custom Responses endpoints are written using OpenClaw's practical openai-completions compatibility mode.");
  }
  if (interfaceType === "ollama-native") {
    notes.push("Ollama uses native API mode with no /v1 suffix so tool calling stays reliable.");
  }

  let outro = `${pc.green("OpenClaw project setup complete")}\n\n`;
  outro += `${pc.cyan("Project")}: ${projectRoot}\n`;
  outro += `${pc.cyan("OPENCLAW_HOME")}: ${openclawHome}\n`;
  outro += `${pc.cyan("Config")}: ${configPath}\n`;
  outro += `${pc.cyan("Env")}: ${envPath}\n`;
  outro += `${pc.cyan("Primary model")}: ${primaryModelRef}\n`;
  outro += `${pc.cyan("Fallback model")}: ${fallbackModelRef}\n`;

  if (notes.length) {
    outro += `\n${pc.yellow("Notes")}:\n- ${notes.join("\n- ")}`;
  }

  p.outro(outro);
}

async function getSuggestedBaseUrl(interfaceConfig) {
  if (interfaceConfig.value === "ollama-native") {
    return "http://127.0.0.1:11434";
  }

  return interfaceConfig.defaultBaseUrl;
}

function buildOpenClawConfig({ interfaceType, baseUrl, apiKey, primaryModelId, fallbackModelId }) {
  const providerId = getProviderId(interfaceType);
  const envEntries = buildEnvEntries(interfaceType, apiKey);
  const providerModelIds = new Set([primaryModelId, fallbackModelId].filter(Boolean));
  const modelRefs = [...providerModelIds].map((id) => `${providerId}/${id}`);
  const providerModels = [...providerModelIds].map((id) => ({
    id,
    name: id,
    contextWindow: 131072,
    maxTokens: 131072,
    cost: {
      input: 0,
      output: 0,
      cacheRead: 0,
      cacheWrite: 0
    }
  }));

  const config = {
    agents: {
      defaults: {
        workspace: workspaceDir,
        model: {
          primary: `${providerId}/${primaryModelId}`,
          fallbacks:
            fallbackModelId && fallbackModelId !== primaryModelId
              ? [`${providerId}/${fallbackModelId}`]
              : []
        },
        models: Object.fromEntries(
          modelRefs.map((ref, index) => [
            ref,
            { alias: index === 0 ? "Primary" : "Fallback" }
          ])
        )
      }
    },
    models: {
      mode: "merge",
      providers: {
        [providerId]: buildProviderConfig({
          providerId,
          interfaceType,
          baseUrl,
          providerModels
        })
      }
    }
  };

  return {
    ...config,
    envEntries,
    primaryModelRef: `${providerId}/${primaryModelId}`,
    fallbackModelRef:
      fallbackModelId && fallbackModelId !== primaryModelId
        ? `${providerId}/${fallbackModelId}`
        : `${providerId}/${primaryModelId}`
  };
}
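
// Illustrative result (derived from the function above, not emitted verbatim):
// for interfaceType "openai-compatible", baseUrl "https://api.example.com",
// primary model "gpt-4.1" and no distinct fallback, openclaw.json is shaped like:
//
//   {
//     "agents": {
//       "defaults": {
//         "workspace": "<project>/.openclaw-home/workspace",
//         "model": { "primary": "customopenai/gpt-4.1", "fallbacks": [] },
//         "models": { "customopenai/gpt-4.1": { "alias": "Primary" } }
//       }
//     },
//     "models": {
//       "mode": "merge",
//       "providers": {
//         "customopenai": {
//           "baseUrl": "https://api.example.com/v1",
//           "apiKey": "${OPENCLAW_MODEL_API_KEY}",
//           "api": "openai-completions",
//           "models": [ ...one entry per model id... ]
//         }
//       }
//     }
//   }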

function buildProviderConfig({ providerId, interfaceType, baseUrl, providerModels }) {
  if (interfaceType === "google-gemini") {
    return {
      baseUrl,
      apiKey: "${OPENCLAW_MODEL_API_KEY}",
      models: providerModels
    };
  }

  if (interfaceType === "ollama-native") {
    return {
      baseUrl: stripTrailingSlash(baseUrl),
      apiKey: "${OLLAMA_API_KEY}",
      api: "ollama",
      models: providerModels
    };
  }

  if (interfaceType === "anthropic-native") {
    return {
      baseUrl: stripTrailingSlash(baseUrl),
      apiKey: "${OPENCLAW_MODEL_API_KEY}",
      api: "anthropic-messages",
      models: providerModels
    };
  }

  return {
    baseUrl: ensureV1BaseUrl(baseUrl),
    apiKey: "${OPENCLAW_MODEL_API_KEY}",
    api: "openai-completions",
    models: providerModels
  };
}

function buildEnvEntries(interfaceType, apiKey) {
  if (interfaceType === "ollama-native") {
    return {
      OLLAMA_API_KEY: apiKey || "ollama-local"
    };
  }

  if (interfaceType === "google-gemini") {
    return {
      OPENCLAW_MODEL_API_KEY: apiKey
    };
  }

  return {
    OPENCLAW_MODEL_API_KEY: apiKey
  };
}

function buildEnvFile(entries) {
  const lines = Object.entries(entries)
    .filter(([, value]) => typeof value === "string" && value.length > 0)
    .map(([key, value]) => `${key}=${escapeEnvValue(value)}`);

  return `${lines.join("\n")}\n`;
}

function escapeEnvValue(value) {
  return JSON.stringify(String(value));
}
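
// Example (illustrative): buildEnvFile({ OPENCLAW_MODEL_API_KEY: "sk-example" })
// returns 'OPENCLAW_MODEL_API_KEY="sk-example"\n'. Values are JSON-quoted so
// spaces and special characters survive dotenv-style parsing.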

function getProviderId(interfaceType) {
  switch (interfaceType) {
    case "anthropic-native":
      return "customanthropic";
    case "google-gemini":
      return "google";
    case "ollama-native":
      return "ollama";
    case "openai-responses":
      return "customopenairesponses";
    case "openai-compatible":
    default:
      return "customopenai";
  }
}

function getSuggestedPrimaryModelId(interfaceType) {
  switch (interfaceType) {
    case "anthropic-native":
      return "claude-sonnet-4-5";
    case "google-gemini":
      return "gemini-2.5-pro";
    case "ollama-native":
      return "llama3.2";
    case "openai-responses":
    case "openai-compatible":
    default:
      return "gpt-4.1";
  }
}

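// Best-effort model discovery: probe the provider's listing endpoint
// (OpenAI-style GET /v1/models, falling back to Ollama's GET /api/tags)
// with a short timeout, and suggest the first model id found.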
async function tryDetectModelId(baseUrl, interfaceType) {
  const urls = [];
  const normalized = stripTrailingSlash(baseUrl);
  if (!normalized) {
    return null;
  }

  if (interfaceType === "ollama-native") {
    urls.push(`${normalized}/api/tags`);
  } else {
    urls.push(`${ensureV1BaseUrl(normalized)}/models`);
    urls.push(`${normalized}/api/tags`);
  }

  for (const url of urls) {
    try {
      const controller = new AbortController();
      const timeout = setTimeout(() => controller.abort(), 1500);
      const response = await fetch(url, { signal: controller.signal });
      clearTimeout(timeout);
      if (!response.ok) {
        continue;
      }

      const payload = await response.json();
      const v1Id = payload?.data?.[0]?.id;
      if (typeof v1Id === "string" && v1Id.trim()) {
        return v1Id.trim();
      }

      const tagId = payload?.models?.[0]?.name ?? payload?.models?.[0]?.model;
      if (typeof tagId === "string" && tagId.trim()) {
        return tagId.trim();
      }
    } catch {
      // Best-effort only.
    }
  }

  return null;
}

function ensureV1BaseUrl(value) {
  const normalized = stripTrailingSlash(value);
  return normalized.endsWith("/v1") ? normalized : `${normalized}/v1`;
}
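
// Examples (illustrative):
//   ensureV1BaseUrl("https://api.example.com")     -> "https://api.example.com/v1"
//   ensureV1BaseUrl("https://api.example.com/v1/") -> "https://api.example.com/v1"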

function stripTrailingSlash(value) {
  return String(value ?? "").replace(/\/+$/, "");
}

function isOfficialOpenAiBaseUrl(value) {
  // True only for the official endpoint, with or without the /v1 suffix.
  return /^https:\/\/api\.openai\.com(\/v1)?$/i.test(stripTrailingSlash(value));
}

async function promptRequiredText({ message, placeholder, initialValue }) {
  const answer = await p.text({
    message,
    placeholder,
    initialValue,
    validate: (value) => {
      if (!value || !value.trim()) {
        return "This field is required.";
      }
      return undefined;
    }
  });

  if (p.isCancel(answer)) {
    exitCancelled();
  }

  return answer.trim();
}

async function promptOptionalText({ message, placeholder, initialValue }) {
  const answer = await p.text({
    message,
    placeholder,
    initialValue,
    validate: () => undefined
  });

  if (p.isCancel(answer)) {
    exitCancelled();
  }

  // Empty submissions may come back as undefined, so normalize before trimming.
  return (answer ?? "").trim();
}

function exitCancelled() {
  p.cancel("Operation cancelled");
  process.exit(0);
}