@lark-apaas/openclaw-scripts-diagnose-cli 0.1.1-alpha.3 → 0.1.1-alpha.30
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +7 -1
- package/dist/index.cjs +1880 -232
- package/package.json +6 -2
- package/template/openclaw.json +539 -0
- package/template/scripts/restart.sh +37 -0
- package/template/scripts/start.sh +6 -0
- package/template/scripts/stop.sh +2 -0
package/dist/index.cjs
CHANGED
|
@@ -31,6 +31,14 @@ let node_path = require("node:path");
|
|
|
31
31
|
node_path = __toESM(node_path);
|
|
32
32
|
let node_child_process = require("node:child_process");
|
|
33
33
|
let node_crypto = require("node:crypto");
|
|
34
|
+
node_crypto = __toESM(node_crypto);
|
|
35
|
+
let node_os = require("node:os");
|
|
36
|
+
node_os = __toESM(node_os);
|
|
37
|
+
let node_stream = require("node:stream");
|
|
38
|
+
let node_stream_promises = require("node:stream/promises");
|
|
39
|
+
let node_assert = require("node:assert");
|
|
40
|
+
node_assert = __toESM(node_assert);
|
|
41
|
+
let _lark_apaas_http_client = require("@lark-apaas/http-client");
|
|
34
42
|
//#region src/rule-engine/base.ts
|
|
35
43
|
/** Abstract base class for all diagnose rules */
|
|
36
44
|
var DiagnoseRule = class {
|
|
@@ -127,6 +135,14 @@ function isValidJWT(token) {
|
|
|
127
135
|
return false;
|
|
128
136
|
}
|
|
129
137
|
}
|
|
138
|
+
/**
|
|
139
|
+
* Return `val` as a plain-object record (non-null, non-array object), or
|
|
140
|
+
* `undefined` otherwise. Cheaper than `getNestedMap` when the value is already
|
|
141
|
+
* at hand.
|
|
142
|
+
*/
|
|
143
|
+
function asRecord(val) {
|
|
144
|
+
return val != null && typeof val === "object" && !Array.isArray(val) ? val : void 0;
|
|
145
|
+
}
|
|
130
146
|
/** Set a deeply nested value, creating intermediate objects as needed. */
|
|
131
147
|
function setNestedValue(obj, keys, value) {
|
|
132
148
|
let current = obj;
|
|
@@ -192,14 +208,14 @@ function fileExists(filePath) {
|
|
|
192
208
|
return node_fs.default.existsSync(filePath);
|
|
193
209
|
}
|
|
194
210
|
/** Execute a shell command, return stdout. Throws on failure. */
|
|
195
|
-
function shell(cmd, timeoutMs =
|
|
211
|
+
function shell(cmd, timeoutMs = 6e4) {
|
|
196
212
|
return (0, node_child_process.execSync)(cmd, {
|
|
197
213
|
encoding: "utf-8",
|
|
198
214
|
timeout: timeoutMs
|
|
199
215
|
}).trim();
|
|
200
216
|
}
|
|
201
217
|
//#endregion
|
|
202
|
-
//#region \0@oxc-project+runtime@0.
|
|
218
|
+
//#region \0@oxc-project+runtime@0.115.0/helpers/decorate.js
|
|
203
219
|
function __decorate(decorators, target, key, desc) {
|
|
204
220
|
var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
|
|
205
221
|
if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
|
|
@@ -245,11 +261,15 @@ function findBackupFiles(configPath) {
|
|
|
245
261
|
}
|
|
246
262
|
/**
|
|
247
263
|
* Among backup files, find the one with the highest numeric suffix.
|
|
248
|
-
*
|
|
264
|
+
* Supports all three naming styles used by the current backup code and its
|
|
265
|
+
* older variants:
|
|
266
|
+
* `.bak` → n = 0 (legacy single-slot backup)
|
|
267
|
+
* `.bakN` → n = N (older style, dot-less)
|
|
268
|
+
* `.bak.N` → n = N (current style written by reset Step 1)
|
|
249
269
|
*/
|
|
250
270
|
function findHighestBackup(backupFiles) {
|
|
251
271
|
if (backupFiles.length === 0) return null;
|
|
252
|
-
const bakRegex = /\.bak(\d*)$/;
|
|
272
|
+
const bakRegex = /\.bak\.?(\d*)$/;
|
|
253
273
|
let best = null;
|
|
254
274
|
for (const f of backupFiles) {
|
|
255
275
|
const match = bakRegex.exec(f);
|
|
@@ -264,7 +284,7 @@ function findHighestBackup(backupFiles) {
|
|
|
264
284
|
}
|
|
265
285
|
let ConfigFileBackupRule = class ConfigFileBackupRule extends DiagnoseRule {
|
|
266
286
|
validate(ctx) {
|
|
267
|
-
const configPath = ctx
|
|
287
|
+
const { configPath } = ctx;
|
|
268
288
|
if (!configPath) return {
|
|
269
289
|
pass: false,
|
|
270
290
|
message: "configPath not provided"
|
|
@@ -277,7 +297,7 @@ let ConfigFileBackupRule = class ConfigFileBackupRule extends DiagnoseRule {
|
|
|
277
297
|
return { pass: true };
|
|
278
298
|
}
|
|
279
299
|
repair(ctx) {
|
|
280
|
-
const configPath = ctx
|
|
300
|
+
const { configPath } = ctx;
|
|
281
301
|
if (!configPath) return;
|
|
282
302
|
const best = findHighestBackup(findBackupFiles(configPath));
|
|
283
303
|
if (!best) return;
|
|
@@ -306,7 +326,7 @@ function hasBackupFiles(configPath) {
|
|
|
306
326
|
}
|
|
307
327
|
let ConfigFileMissingRule = class ConfigFileMissingRule extends DiagnoseRule {
|
|
308
328
|
validate(ctx) {
|
|
309
|
-
const configPath = ctx
|
|
329
|
+
const { configPath } = ctx;
|
|
310
330
|
if (!configPath) return {
|
|
311
331
|
pass: false,
|
|
312
332
|
message: "configPath not provided"
|
|
@@ -328,7 +348,7 @@ ConfigFileMissingRule = __decorate([Rule({
|
|
|
328
348
|
//#region src/rules/config-syntax.ts
|
|
329
349
|
let ConfigSyntaxRule = class ConfigSyntaxRule extends DiagnoseRule {
|
|
330
350
|
validate(ctx) {
|
|
331
|
-
const configPath = ctx
|
|
351
|
+
const { configPath } = ctx;
|
|
332
352
|
if (!fileExists(configPath)) return { pass: true };
|
|
333
353
|
try {
|
|
334
354
|
loadJSON5().parse(readFile(configPath));
|
|
@@ -347,6 +367,74 @@ ConfigSyntaxRule = __decorate([Rule({
|
|
|
347
367
|
repairMode: "ai"
|
|
348
368
|
})], ConfigSyntaxRule);
|
|
349
369
|
//#endregion
|
|
370
|
+
//#region src/rules/template-vars-unreplaced.ts
|
|
371
|
+
/**
|
|
372
|
+
* Placeholder format used by miaoda-openclaw-template and Go-side templateVars,
|
|
373
|
+
* e.g. `$$__FEISHU_APP_ID__`. Double underscores on both sides act as a natural
|
|
374
|
+
* boundary so split-join replacement can't accidentally overlap between keys.
|
|
375
|
+
*/
|
|
376
|
+
const PLACEHOLDER_RE = /\$\$__[A-Z0-9_]+__/g;
|
|
377
|
+
let TemplateVarsUnreplacedRule = class TemplateVarsUnreplacedRule extends DiagnoseRule {
|
|
378
|
+
validate(ctx) {
|
|
379
|
+
const found = /* @__PURE__ */ new Set();
|
|
380
|
+
collectPlaceholders(ctx.config, found);
|
|
381
|
+
if (found.size === 0) return { pass: true };
|
|
382
|
+
return {
|
|
383
|
+
pass: false,
|
|
384
|
+
message: "存在未替换的模板占位符: " + [...found].sort().join(", ")
|
|
385
|
+
};
|
|
386
|
+
}
|
|
387
|
+
repair(ctx) {
|
|
388
|
+
const map = ctx.templateVars;
|
|
389
|
+
if (!map || Object.keys(map).length === 0) return;
|
|
390
|
+
replaceInPlace(ctx.config, Object.entries(map));
|
|
391
|
+
}
|
|
392
|
+
};
|
|
393
|
+
TemplateVarsUnreplacedRule = __decorate([Rule({
|
|
394
|
+
key: "template_vars_unreplaced",
|
|
395
|
+
dependsOn: ["config_syntax_check"],
|
|
396
|
+
repairMode: "standard"
|
|
397
|
+
})], TemplateVarsUnreplacedRule);
|
|
398
|
+
function collectPlaceholders(value, found) {
|
|
399
|
+
if (typeof value === "string") {
|
|
400
|
+
const matches = value.match(PLACEHOLDER_RE);
|
|
401
|
+
if (matches) for (const m of matches) found.add(m);
|
|
402
|
+
return;
|
|
403
|
+
}
|
|
404
|
+
if (Array.isArray(value)) {
|
|
405
|
+
for (const v of value) collectPlaceholders(v, found);
|
|
406
|
+
return;
|
|
407
|
+
}
|
|
408
|
+
if (value && typeof value === "object") for (const v of Object.values(value)) collectPlaceholders(v, found);
|
|
409
|
+
}
|
|
410
|
+
function replaceInPlace(value, entries) {
|
|
411
|
+
if (Array.isArray(value)) {
|
|
412
|
+
for (let i = 0; i < value.length; i++) {
|
|
413
|
+
const el = value[i];
|
|
414
|
+
if (typeof el === "string") value[i] = applyVars(el, entries);
|
|
415
|
+
else replaceInPlace(el, entries);
|
|
416
|
+
}
|
|
417
|
+
return;
|
|
418
|
+
}
|
|
419
|
+
if (value && typeof value === "object") {
|
|
420
|
+
const obj = value;
|
|
421
|
+
for (const key of Object.keys(obj)) {
|
|
422
|
+
const v = obj[key];
|
|
423
|
+
if (typeof v === "string") obj[key] = applyVars(v, entries);
|
|
424
|
+
else replaceInPlace(v, entries);
|
|
425
|
+
}
|
|
426
|
+
}
|
|
427
|
+
}
|
|
428
|
+
/** Split-join replacement — matches the algorithm in reset.ts:120 and avoids regex-escaping `$$`. */
|
|
429
|
+
function applyVars(str, entries) {
|
|
430
|
+
let out = str;
|
|
431
|
+
for (const [placeholder, value] of entries) {
|
|
432
|
+
if (!value) continue;
|
|
433
|
+
if (out.includes(placeholder)) out = out.split(placeholder).join(value);
|
|
434
|
+
}
|
|
435
|
+
return out;
|
|
436
|
+
}
|
|
437
|
+
//#endregion
|
|
350
438
|
//#region src/rules/model-provider.ts
|
|
351
439
|
var _ModelProviderRule;
|
|
352
440
|
let ModelProviderRule = class ModelProviderRule extends DiagnoseRule {
|
|
@@ -596,6 +684,19 @@ FeishuChannelRule = _FeishuChannelRule = __decorate([Rule({
|
|
|
596
684
|
repairMode: "standard"
|
|
597
685
|
})], FeishuChannelRule);
|
|
598
686
|
//#endregion
|
|
687
|
+
//#region src/rules/feishu-default-account.ts
|
|
688
|
+
let FeishuDefaultAccountRule = class FeishuDefaultAccountRule extends DiagnoseRule {
|
|
689
|
+
validate(_ctx) {
|
|
690
|
+
return { pass: true };
|
|
691
|
+
}
|
|
692
|
+
repair(_ctx) {}
|
|
693
|
+
};
|
|
694
|
+
FeishuDefaultAccountRule = __decorate([Rule({
|
|
695
|
+
key: "feishu_default_account",
|
|
696
|
+
dependsOn: ["config_syntax_check"],
|
|
697
|
+
repairMode: "standard"
|
|
698
|
+
})], FeishuDefaultAccountRule);
|
|
699
|
+
//#endregion
|
|
599
700
|
//#region src/rules/gateway.ts
|
|
600
701
|
var _GatewayRule;
|
|
601
702
|
let GatewayRule = class GatewayRule extends DiagnoseRule {
|
|
@@ -686,7 +787,9 @@ let AllowedOriginsRule = class AllowedOriginsRule extends DiagnoseRule {
|
|
|
686
787
|
validate(ctx) {
|
|
687
788
|
const expected = getExpectedOrigins(ctx.vars);
|
|
688
789
|
if (expected.length === 0) return { pass: true };
|
|
689
|
-
const
|
|
790
|
+
const current = getCurrentOrigins(ctx.config);
|
|
791
|
+
if (hasWildcard(current)) return { pass: true };
|
|
792
|
+
const missing = findMissing(current, expected);
|
|
690
793
|
if (missing.length === 0) return { pass: true };
|
|
691
794
|
return {
|
|
692
795
|
pass: false,
|
|
@@ -696,6 +799,7 @@ let AllowedOriginsRule = class AllowedOriginsRule extends DiagnoseRule {
|
|
|
696
799
|
repair(ctx) {
|
|
697
800
|
const expected = getExpectedOrigins(ctx.vars);
|
|
698
801
|
const current = getCurrentOrigins(ctx.config);
|
|
802
|
+
if (hasWildcard(current)) return;
|
|
699
803
|
const missing = findMissing(current, expected);
|
|
700
804
|
if (missing.length > 0) {
|
|
701
805
|
const seen = /* @__PURE__ */ new Set();
|
|
@@ -735,6 +839,10 @@ function findMissing(current, expected) {
|
|
|
735
839
|
const set = new Set(current);
|
|
736
840
|
return expected.filter((o) => !set.has(o));
|
|
737
841
|
}
|
|
842
|
+
/** Exact "*" entry means allow-all; pattern globs like "https://*.example.com" don't count. */
|
|
843
|
+
function hasWildcard(origins) {
|
|
844
|
+
return origins.includes("*");
|
|
845
|
+
}
|
|
738
846
|
//#endregion
|
|
739
847
|
//#region src/rules/jwt-token.ts
|
|
740
848
|
let JwtTokenRule = class JwtTokenRule extends DiagnoseRule {
|
|
@@ -852,6 +960,217 @@ SecretsRule = __decorate([Rule({
|
|
|
852
960
|
skipWhen: ({ hasMiaoda, deps }) => !hasMiaoda || !deps.usesMiaodaSecretProvider
|
|
853
961
|
})], SecretsRule);
|
|
854
962
|
//#endregion
|
|
963
|
+
//#region src/rules/cleanup-install-backup-dirs.ts
|
|
964
|
+
const DIR_PREFIX = ".openclaw-install-";
|
|
965
|
+
function resolveExtensionsDir(configPath) {
|
|
966
|
+
return node_path.default.join(node_path.default.dirname(configPath), "extensions");
|
|
967
|
+
}
|
|
968
|
+
function findLeftoverDirs(extensionsDir) {
|
|
969
|
+
if (!fileExists(extensionsDir)) return [];
|
|
970
|
+
let entries;
|
|
971
|
+
try {
|
|
972
|
+
entries = node_fs.default.readdirSync(extensionsDir, { withFileTypes: true });
|
|
973
|
+
} catch {
|
|
974
|
+
return [];
|
|
975
|
+
}
|
|
976
|
+
return entries.filter((e) => e.isDirectory() && e.name.startsWith(DIR_PREFIX)).map((e) => node_path.default.join(extensionsDir, e.name));
|
|
977
|
+
}
|
|
978
|
+
let CleanupInstallBackupDirsRule = class CleanupInstallBackupDirsRule extends DiagnoseRule {
|
|
979
|
+
validate(ctx) {
|
|
980
|
+
const { configPath } = ctx;
|
|
981
|
+
if (!configPath) return { pass: true };
|
|
982
|
+
const dirs = findLeftoverDirs(resolveExtensionsDir(configPath));
|
|
983
|
+
if (dirs.length === 0) return { pass: true };
|
|
984
|
+
return {
|
|
985
|
+
pass: false,
|
|
986
|
+
message: `extensions 目录下发现 ${dirs.length} 个 ${DIR_PREFIX}* 脏目录需要清理`
|
|
987
|
+
};
|
|
988
|
+
}
|
|
989
|
+
repair(ctx) {
|
|
990
|
+
const { configPath } = ctx;
|
|
991
|
+
if (!configPath) return;
|
|
992
|
+
const dirs = findLeftoverDirs(resolveExtensionsDir(configPath));
|
|
993
|
+
const failures = [];
|
|
994
|
+
for (const dir of dirs) try {
|
|
995
|
+
node_fs.default.rmSync(dir, {
|
|
996
|
+
recursive: true,
|
|
997
|
+
force: true
|
|
998
|
+
});
|
|
999
|
+
} catch (e) {
|
|
1000
|
+
failures.push(`${node_path.default.basename(dir)}: ${e.message}`);
|
|
1001
|
+
}
|
|
1002
|
+
if (dirs.length > 0 && failures.length === dirs.length) throw new Error(`cleanup_install_backup_dirs: 全部清理失败: ${failures.join("; ")}`);
|
|
1003
|
+
}
|
|
1004
|
+
};
|
|
1005
|
+
CleanupInstallBackupDirsRule = __decorate([Rule({
|
|
1006
|
+
key: "cleanup_install_backup_dirs",
|
|
1007
|
+
repairMode: "standard"
|
|
1008
|
+
})], CleanupInstallBackupDirsRule);
|
|
1009
|
+
//#endregion
|
|
1010
|
+
//#region src/rules/miaoda-official-plugins-install-spec-unlock.ts
|
|
1011
|
+
/**
|
|
1012
|
+
* Official miaoda-side plugins that must track manifest — version-locked specs
|
|
1013
|
+
* here block upgrades. Third-party / user-installed plugins are intentionally
|
|
1014
|
+
* out of scope (users may pin them deliberately).
|
|
1015
|
+
*/
|
|
1016
|
+
const OFFICIAL_PLUGIN_NAMES = new Set([
|
|
1017
|
+
"openclaw-extension-miaoda",
|
|
1018
|
+
"openclaw-extension-miaoda-coding",
|
|
1019
|
+
"openclaw-guardian-plugin",
|
|
1020
|
+
"openclaw-mem0-plugin",
|
|
1021
|
+
"openclaw-lark"
|
|
1022
|
+
]);
|
|
1023
|
+
const LOCKED_NPM_SPEC = /^(@[a-z0-9][\w.-]*\/)?[a-z0-9][\w.-]*@[^@/:#\s]+$/i;
|
|
1024
|
+
function isLockedNpmSpec(spec) {
|
|
1025
|
+
return typeof spec === "string" && LOCKED_NPM_SPEC.test(spec);
|
|
1026
|
+
}
|
|
1027
|
+
function unlockSpec(spec) {
|
|
1028
|
+
const slash = spec.indexOf("/");
|
|
1029
|
+
const cut = slash === -1 ? spec.indexOf("@") : spec.indexOf("@", slash + 1);
|
|
1030
|
+
return spec.slice(0, cut);
|
|
1031
|
+
}
|
|
1032
|
+
/** Yield `[key, lockedSpec]` for every official-plugin install whose `spec` is locked. */
|
|
1033
|
+
function* iterLockedOfficialInstalls(config) {
|
|
1034
|
+
const installs = getNestedMap(config, "plugins", "installs");
|
|
1035
|
+
if (!installs) return;
|
|
1036
|
+
for (const [key, entry] of Object.entries(installs)) {
|
|
1037
|
+
if (!OFFICIAL_PLUGIN_NAMES.has(key)) continue;
|
|
1038
|
+
const spec = asRecord(entry)?.spec;
|
|
1039
|
+
if (isLockedNpmSpec(spec)) yield [key, spec];
|
|
1040
|
+
}
|
|
1041
|
+
}
|
|
1042
|
+
let MiaodaOfficialPluginsInstallSpecUnlockRule = class MiaodaOfficialPluginsInstallSpecUnlockRule extends DiagnoseRule {
|
|
1043
|
+
validate(ctx) {
|
|
1044
|
+
const locked = [...iterLockedOfficialInstalls(ctx.config)].map(([k]) => k);
|
|
1045
|
+
if (locked.length === 0) return { pass: true };
|
|
1046
|
+
return {
|
|
1047
|
+
pass: false,
|
|
1048
|
+
message: "plugins.installs 中官方插件存在锁版本的 spec: " + locked.sort().join(",")
|
|
1049
|
+
};
|
|
1050
|
+
}
|
|
1051
|
+
repair(ctx) {
|
|
1052
|
+
for (const [key, spec] of iterLockedOfficialInstalls(ctx.config)) setNestedValue(ctx.config, [
|
|
1053
|
+
"plugins",
|
|
1054
|
+
"installs",
|
|
1055
|
+
key,
|
|
1056
|
+
"spec"
|
|
1057
|
+
], unlockSpec(spec));
|
|
1058
|
+
}
|
|
1059
|
+
};
|
|
1060
|
+
MiaodaOfficialPluginsInstallSpecUnlockRule = __decorate([Rule({
|
|
1061
|
+
key: "miaoda_official_plugins_install_spec_unlock",
|
|
1062
|
+
dependsOn: ["config_syntax_check"],
|
|
1063
|
+
repairMode: "standard"
|
|
1064
|
+
})], MiaodaOfficialPluginsInstallSpecUnlockRule);
|
|
1065
|
+
//#endregion
|
|
1066
|
+
//#region src/rules/old-miaoda-plugins-cleanup.ts
|
|
1067
|
+
const NEW_MIAODA = "openclaw-extension-miaoda";
|
|
1068
|
+
const OLD_PLUGIN_NAMES = Object.freeze([
|
|
1069
|
+
"openclaw-feishu-greeting",
|
|
1070
|
+
"openclaw-miaoda-keepalive",
|
|
1071
|
+
"feishu-greeting",
|
|
1072
|
+
"miaoda-keepalive"
|
|
1073
|
+
]);
|
|
1074
|
+
function getPluginMaps(config) {
|
|
1075
|
+
const rawAllow = asRecord(config.plugins)?.allow;
|
|
1076
|
+
return {
|
|
1077
|
+
entries: getNestedMap(config, "plugins", "entries"),
|
|
1078
|
+
installs: getNestedMap(config, "plugins", "installs"),
|
|
1079
|
+
allow: Array.isArray(rawAllow) ? rawAllow : void 0
|
|
1080
|
+
};
|
|
1081
|
+
}
|
|
1082
|
+
function getExtensionsDir(configPath) {
|
|
1083
|
+
return node_path.default.join(node_path.default.dirname(configPath), "extensions");
|
|
1084
|
+
}
|
|
1085
|
+
function hasNewMiaoda({ entries, installs, allow }) {
|
|
1086
|
+
return asRecord(entries?.[NEW_MIAODA]) != null || asRecord(installs?.[NEW_MIAODA]) != null || (allow?.includes(NEW_MIAODA) ?? false);
|
|
1087
|
+
}
|
|
1088
|
+
function findResiduals({ entries, installs, allow }, extensionsDir) {
|
|
1089
|
+
return OLD_PLUGIN_NAMES.filter((name) => entries?.[name] != null || installs?.[name] != null || (allow?.includes(name) ?? false) || node_fs.default.existsSync(node_path.default.join(extensionsDir, name)));
|
|
1090
|
+
}
|
|
1091
|
+
let OldMiaodaPluginsCleanupRule = class OldMiaodaPluginsCleanupRule extends DiagnoseRule {
|
|
1092
|
+
validate(ctx) {
|
|
1093
|
+
const maps = getPluginMaps(ctx.config);
|
|
1094
|
+
if (!hasNewMiaoda(maps)) return { pass: true };
|
|
1095
|
+
const residuals = findResiduals(maps, getExtensionsDir(ctx.configPath));
|
|
1096
|
+
if (residuals.length === 0) return { pass: true };
|
|
1097
|
+
return {
|
|
1098
|
+
pass: false,
|
|
1099
|
+
message: "旧 miaoda 插件残留: " + residuals.sort().join(",")
|
|
1100
|
+
};
|
|
1101
|
+
}
|
|
1102
|
+
repair(ctx) {
|
|
1103
|
+
const maps = getPluginMaps(ctx.config);
|
|
1104
|
+
if (!hasNewMiaoda(maps)) return;
|
|
1105
|
+
const extensionsDir = getExtensionsDir(ctx.configPath);
|
|
1106
|
+
const { entries, installs, allow } = maps;
|
|
1107
|
+
const oldSet = new Set(OLD_PLUGIN_NAMES);
|
|
1108
|
+
if (allow) for (let i = allow.length - 1; i >= 0; i--) {
|
|
1109
|
+
const v = allow[i];
|
|
1110
|
+
if (typeof v === "string" && oldSet.has(v)) allow.splice(i, 1);
|
|
1111
|
+
}
|
|
1112
|
+
for (const name of OLD_PLUGIN_NAMES) {
|
|
1113
|
+
if (entries && name in entries) delete entries[name];
|
|
1114
|
+
if (installs && name in installs) delete installs[name];
|
|
1115
|
+
const target = node_path.default.join(extensionsDir, name);
|
|
1116
|
+
const rel = node_path.default.relative(extensionsDir, target);
|
|
1117
|
+
if (!rel || rel.startsWith("..") || node_path.default.isAbsolute(rel)) continue;
|
|
1118
|
+
try {
|
|
1119
|
+
node_fs.default.rmSync(target, {
|
|
1120
|
+
recursive: true,
|
|
1121
|
+
force: true
|
|
1122
|
+
});
|
|
1123
|
+
} catch (e) {
|
|
1124
|
+
console.error(`[old_miaoda_plugins_cleanup] rmSync ${target} failed: ${e.message}`);
|
|
1125
|
+
}
|
|
1126
|
+
}
|
|
1127
|
+
}
|
|
1128
|
+
};
|
|
1129
|
+
OldMiaodaPluginsCleanupRule = __decorate([Rule({
|
|
1130
|
+
key: "old_miaoda_plugins_cleanup",
|
|
1131
|
+
dependsOn: ["config_syntax_check"],
|
|
1132
|
+
repairMode: "standard"
|
|
1133
|
+
})], OldMiaodaPluginsCleanupRule);
|
|
1134
|
+
//#endregion
|
|
1135
|
+
//#region src/rules/lark-plugin-allow.ts
|
|
1136
|
+
const LARK_PLUGIN = "openclaw-lark";
|
|
1137
|
+
const LARK_PLUGIN_NAMES = [LARK_PLUGIN, "feishu-openclaw-plugin"];
|
|
1138
|
+
let LarkPluginAllowRule = class LarkPluginAllowRule extends DiagnoseRule {
|
|
1139
|
+
validate(ctx) {
|
|
1140
|
+
const allow = getAllow(ctx.config);
|
|
1141
|
+
if (LARK_PLUGIN_NAMES.some((name) => allow.includes(name))) return { pass: true };
|
|
1142
|
+
return {
|
|
1143
|
+
pass: false,
|
|
1144
|
+
message: `plugins.allow 缺少飞书插件 (expected one of: ${LARK_PLUGIN_NAMES.join(", ")})`
|
|
1145
|
+
};
|
|
1146
|
+
}
|
|
1147
|
+
repair(ctx) {
|
|
1148
|
+
if (ctx.config.plugins == null || typeof ctx.config.plugins !== "object" || Array.isArray(ctx.config.plugins)) {
|
|
1149
|
+
ctx.config.plugins = { allow: [LARK_PLUGIN] };
|
|
1150
|
+
return;
|
|
1151
|
+
}
|
|
1152
|
+
const pluginsMap = ctx.config.plugins;
|
|
1153
|
+
const rawAllow = pluginsMap.allow;
|
|
1154
|
+
const original = Array.isArray(rawAllow) ? rawAllow : [];
|
|
1155
|
+
const stringAllow = original.filter((e) => typeof e === "string");
|
|
1156
|
+
if (LARK_PLUGIN_NAMES.some((name) => stringAllow.includes(name))) return;
|
|
1157
|
+
original.push(LARK_PLUGIN);
|
|
1158
|
+
pluginsMap.allow = original;
|
|
1159
|
+
}
|
|
1160
|
+
};
|
|
1161
|
+
LarkPluginAllowRule = __decorate([Rule({
|
|
1162
|
+
key: "lark_plugin_allow",
|
|
1163
|
+
dependsOn: ["config_syntax_check"],
|
|
1164
|
+
repairMode: "standard"
|
|
1165
|
+
})], LarkPluginAllowRule);
|
|
1166
|
+
function getAllow(config) {
|
|
1167
|
+
const plugins = config.plugins;
|
|
1168
|
+
if (plugins == null || typeof plugins !== "object" || Array.isArray(plugins)) return [];
|
|
1169
|
+
const allow = plugins.allow;
|
|
1170
|
+
if (!Array.isArray(allow)) return [];
|
|
1171
|
+
return allow.filter((e) => typeof e === "string");
|
|
1172
|
+
}
|
|
1173
|
+
//#endregion
|
|
855
1174
|
//#region src/check.ts
|
|
856
1175
|
function runCheck(input) {
|
|
857
1176
|
const result = { failedRules: {
|
|
@@ -864,12 +1183,14 @@ function runCheck(input) {
|
|
|
864
1183
|
const failedKeys = /* @__PURE__ */ new Set();
|
|
865
1184
|
let configParsed = false;
|
|
866
1185
|
let ctx = {
|
|
867
|
-
config: {
|
|
1186
|
+
config: {},
|
|
1187
|
+
configPath: input.configPath,
|
|
868
1188
|
vars: input.vars,
|
|
869
1189
|
providerDeps: {
|
|
870
1190
|
usesMiaodaProvider: false,
|
|
871
1191
|
usesMiaodaSecretProvider: false
|
|
872
|
-
}
|
|
1192
|
+
},
|
|
1193
|
+
templateVars: input.templateVars
|
|
873
1194
|
};
|
|
874
1195
|
for (const rule of rules) {
|
|
875
1196
|
const meta = rule.meta;
|
|
@@ -880,8 +1201,10 @@ function runCheck(input) {
|
|
|
880
1201
|
const deps = analyzeProviderDeps(parsed);
|
|
881
1202
|
ctx = {
|
|
882
1203
|
config: parsed,
|
|
1204
|
+
configPath: input.configPath,
|
|
883
1205
|
vars: input.vars,
|
|
884
|
-
providerDeps: deps
|
|
1206
|
+
providerDeps: deps,
|
|
1207
|
+
templateVars: input.templateVars
|
|
885
1208
|
};
|
|
886
1209
|
configParsed = true;
|
|
887
1210
|
} catch {
|
|
@@ -933,12 +1256,14 @@ function runRepair(input) {
|
|
|
933
1256
|
if (rule.meta.repairMode !== "standard") continue;
|
|
934
1257
|
if (rule.meta.dependsOn?.includes("config_syntax_check")) continue;
|
|
935
1258
|
rule.repair({
|
|
936
|
-
config: {
|
|
1259
|
+
config: {},
|
|
1260
|
+
configPath: input.configPath,
|
|
937
1261
|
vars: input.vars,
|
|
938
1262
|
providerDeps: {
|
|
939
1263
|
usesMiaodaProvider: false,
|
|
940
1264
|
usesMiaodaSecretProvider: false
|
|
941
|
-
}
|
|
1265
|
+
},
|
|
1266
|
+
templateVars: input.templateVars
|
|
942
1267
|
});
|
|
943
1268
|
}
|
|
944
1269
|
const JSON5 = loadJSON5();
|
|
@@ -954,8 +1279,10 @@ function runRepair(input) {
|
|
|
954
1279
|
const deps = analyzeProviderDeps(config);
|
|
955
1280
|
const ctx = {
|
|
956
1281
|
config,
|
|
1282
|
+
configPath: input.configPath,
|
|
957
1283
|
vars: input.vars,
|
|
958
|
-
providerDeps: deps
|
|
1284
|
+
providerDeps: deps,
|
|
1285
|
+
templateVars: input.templateVars
|
|
959
1286
|
};
|
|
960
1287
|
let configDirty = false;
|
|
961
1288
|
for (const rule of rules) {
|
|
@@ -985,44 +1312,223 @@ function runRepair(input) {
|
|
|
985
1312
|
}
|
|
986
1313
|
}
|
|
987
1314
|
//#endregion
|
|
988
|
-
//#region src/
|
|
989
|
-
|
|
990
|
-
|
|
1315
|
+
//#region src/paths.ts
|
|
1316
|
+
/**
|
|
1317
|
+
* Central directory for all ephemeral diagnose/reset artifacts: task status
|
|
1318
|
+
* files (`reset-<taskId>.json`) and human-readable step logs
|
|
1319
|
+
* (`reset-<taskId>.log`). Having everything under one dir makes debugging a
|
|
1320
|
+
* stuck reset much easier — `ls /tmp/openclaw-diagnose/` shows every recent
|
|
1321
|
+
* run, and each run's log is right next to its state.
|
|
1322
|
+
*/
|
|
1323
|
+
const DIAGNOSE_DIR = "/tmp/openclaw-diagnose";
|
|
1324
|
+
function resetResultFile(taskId) {
|
|
1325
|
+
return `${DIAGNOSE_DIR}/reset-${taskId}.json`;
|
|
1326
|
+
}
|
|
1327
|
+
function resetLogFile(taskId) {
|
|
1328
|
+
return `${DIAGNOSE_DIR}/reset-${taskId}.log`;
|
|
1329
|
+
}
|
|
1330
|
+
/** Sandbox workspace root where openclaw config + agent state lives. */
|
|
1331
|
+
const WORKSPACE_DIR = "/home/gem/workspace/agent";
|
|
1332
|
+
/** File containing the provider key used by the openclaw miaoda provider. */
|
|
1333
|
+
const PROVIDER_FILE_PATH = "/home/gem/workspace/.force/openclaw/miaoda-provider-key";
|
|
1334
|
+
/** File containing the miaoda openclaw secrets JSON. */
|
|
1335
|
+
const SECRETS_FILE_PATH = "/home/gem/workspace/.force/openclaw/miaoda-openclaw-secrets.json";
|
|
1336
|
+
/** Absolute path to the openclaw config JSON. */
|
|
1337
|
+
const CONFIG_PATH = `${WORKSPACE_DIR}/openclaw.json`;
|
|
1338
|
+
//#endregion
|
|
1339
|
+
//#region src/logger.ts
|
|
1340
|
+
/**
|
|
1341
|
+
* Shared CLI log file. Every log line the CLI emits — whether through
|
|
1342
|
+
* `console.error` (rules, helpers, errors) or through the per-task
|
|
1343
|
+
* `makeLogger` (reset worker) — is tee'd here so operators have a single
|
|
1344
|
+
* file to tail when diagnosing a sandbox.
|
|
1345
|
+
*
|
|
1346
|
+
* `/tmp` is ephemeral on sandbox restart; we rely on that for rotation
|
|
1347
|
+
* (no size-based rotation implemented).
|
|
1348
|
+
*/
|
|
1349
|
+
const CLI_LOG_FILE = "/tmp/openclaw-diagnose/cli.log";
|
|
1350
|
+
/** Append one line to the shared cli.log. Swallows any fs error —
|
|
1351
|
+
* logging must never break the business flow. */
|
|
1352
|
+
function appendCliLog(line) {
|
|
991
1353
|
try {
|
|
992
|
-
const
|
|
1354
|
+
const dir = node_path.default.dirname(CLI_LOG_FILE);
|
|
1355
|
+
if (!node_fs.default.existsSync(dir)) node_fs.default.mkdirSync(dir, { recursive: true });
|
|
1356
|
+
node_fs.default.appendFileSync(CLI_LOG_FILE, line);
|
|
1357
|
+
} catch {}
|
|
1358
|
+
}
|
|
1359
|
+
let stderrMirrorInstalled = false;
|
|
1360
|
+
/**
|
|
1361
|
+
* Install a process-wide `console.error` interceptor that mirrors each
|
|
1362
|
+
* line to BOTH the original stderr AND cli.log. Call once at CLI entry
|
|
1363
|
+
* before any subcommand dispatch; idempotent.
|
|
1364
|
+
*
|
|
1365
|
+
* Why console.error and not console.log: the CLI's stdout carries the
|
|
1366
|
+
* structured JSON result protocol consumed by sandbox_console and other
|
|
1367
|
+
* callers — any log line on stdout would corrupt JSON parsing. Rules,
|
|
1368
|
+
* helpers, and error paths therefore must route debug output through
|
|
1369
|
+
* console.error (stderr).
|
|
1370
|
+
*/
|
|
1371
|
+
function installStderrMirror() {
|
|
1372
|
+
if (stderrMirrorInstalled) return;
|
|
1373
|
+
stderrMirrorInstalled = true;
|
|
1374
|
+
const original = console.error.bind(console);
|
|
1375
|
+
console.error = (...args) => {
|
|
1376
|
+
original(...args);
|
|
1377
|
+
const body = args.map((a) => typeof a === "string" ? a : safeStringify(a)).join(" ");
|
|
1378
|
+
appendCliLog(`[${(/* @__PURE__ */ new Date()).toISOString()}] ${body}\n`);
|
|
1379
|
+
};
|
|
1380
|
+
}
|
|
1381
|
+
function safeStringify(v) {
|
|
1382
|
+
try {
|
|
1383
|
+
return JSON.stringify(v);
|
|
1384
|
+
} catch {
|
|
1385
|
+
return String(v);
|
|
1386
|
+
}
|
|
1387
|
+
}
|
|
1388
|
+
function makeLogger(logFile) {
|
|
1389
|
+
try {
|
|
1390
|
+
const dir = node_path.default.dirname(logFile);
|
|
1391
|
+
if (!node_fs.default.existsSync(dir)) node_fs.default.mkdirSync(dir, { recursive: true });
|
|
1392
|
+
} catch {}
|
|
1393
|
+
return (msg) => {
|
|
1394
|
+
const line = `[${(/* @__PURE__ */ new Date()).toISOString()}] ${msg}\n`;
|
|
993
1395
|
try {
|
|
994
|
-
|
|
995
|
-
|
|
996
|
-
|
|
997
|
-
|
|
998
|
-
|
|
999
|
-
|
|
1000
|
-
|
|
1001
|
-
|
|
1002
|
-
|
|
1003
|
-
|
|
1004
|
-
|
|
1005
|
-
|
|
1006
|
-
|
|
1007
|
-
|
|
1008
|
-
|
|
1009
|
-
|
|
1010
|
-
|
|
1011
|
-
|
|
1012
|
-
|
|
1013
|
-
|
|
1014
|
-
|
|
1015
|
-
|
|
1016
|
-
|
|
1017
|
-
|
|
1018
|
-
|
|
1019
|
-
|
|
1020
|
-
|
|
1396
|
+
node_fs.default.appendFileSync(logFile, line);
|
|
1397
|
+
} catch {}
|
|
1398
|
+
appendCliLog(line);
|
|
1399
|
+
};
|
|
1400
|
+
}
|
|
1401
|
+
//#endregion
|
|
1402
|
+
//#region src/fs-utils.ts
|
|
1403
|
+
/**
|
|
1404
|
+
* Rename src → dst, falling back to `mv` (which handles cross-device copy)
|
|
1405
|
+
* when the kernel returns EXDEV.
|
|
1406
|
+
*
|
|
1407
|
+
* Sandbox filesystems can put sibling paths on different "devices" from
|
|
1408
|
+
* rename(2)'s point of view: bind mounts, overlayfs copy-up, and
|
|
1409
|
+
* mount-point children inside a single directory all trip EXDEV. Seen in
|
|
1410
|
+
* production when reset's atomic swap did
|
|
1411
|
+
* /home/gem/.npm-global/lib/node_modules/openclaw → openclaw.bak
|
|
1412
|
+
* and the openclaw subdir was a bind-mounted volume.
|
|
1413
|
+
*
|
|
1414
|
+
* Behavior:
|
|
1415
|
+
* - Happy path hits rename(2) — atomic, single syscall, microseconds.
|
|
1416
|
+
* - EXDEV path shells out to `mv`, which does rename() then copy+unlink
|
|
1417
|
+
* on failure. Non-atomic but correct; callers already have rollback
|
|
1418
|
+
* logic (install-openclaw restores from .bak) so loss of atomicity
|
|
1419
|
+
* only matters if the process dies mid-copy, and that's survivable.
|
|
1420
|
+
* - Any other error (ENOENT, EACCES, EBUSY...) rethrows as-is so callers
|
|
1421
|
+
* see the real problem instead of a misleading `mv` fallback failure.
|
|
1422
|
+
*/
|
|
1423
|
+
function moveSafe(src, dst) {
|
|
1424
|
+
try {
|
|
1425
|
+
node_fs.default.renameSync(src, dst);
|
|
1021
1426
|
} catch (e) {
|
|
1022
|
-
|
|
1023
|
-
|
|
1024
|
-
|
|
1025
|
-
|
|
1427
|
+
if (e?.code !== "EXDEV") throw e;
|
|
1428
|
+
execCaptureErr(`mv ${shellQuote(src)} ${shellQuote(dst)}`);
|
|
1429
|
+
}
|
|
1430
|
+
}
|
|
1431
|
+
/**
|
|
1432
|
+
* Run a shell command, re-throwing with stderr attached on failure.
|
|
1433
|
+
*
|
|
1434
|
+
* Node's `execSync(..., { stdio: 'ignore' })` swallows stderr entirely —
|
|
1435
|
+
* callers only see "Command failed: <cmd>" with no hint of the real error
|
|
1436
|
+
* (ENOSPC, EROFS, "unrecognized option", etc.). Production debugging on
|
|
1437
|
+
* sandboxed boxes is painful without the underlying message, so we pipe
|
|
1438
|
+
* stderr, capture it, and embed it in the thrown Error. stdout stays
|
|
1439
|
+
* suppressed because the commands we run here (tar/mv) are silent on
|
|
1440
|
+
* success.
|
|
1441
|
+
*/
|
|
1442
|
+
function execCaptureErr(cmd) {
|
|
1443
|
+
try {
|
|
1444
|
+
(0, node_child_process.execSync)(cmd, { stdio: [
|
|
1445
|
+
"ignore",
|
|
1446
|
+
"ignore",
|
|
1447
|
+
"pipe"
|
|
1448
|
+
] });
|
|
1449
|
+
} catch (e) {
|
|
1450
|
+
const stderr = e?.stderr;
|
|
1451
|
+
const stderrStr = (typeof stderr === "string" ? stderr : stderr?.toString("utf8") ?? "").trim();
|
|
1452
|
+
const base = e?.message ?? "command failed";
|
|
1453
|
+
throw new Error(stderrStr ? `${base}\nstderr: ${stderrStr}` : base);
|
|
1454
|
+
}
|
|
1455
|
+
}
|
|
1456
|
+
/** POSIX single-quote shell escape. Paths with embedded quotes are rare but
 * the token-file path conventions in sandboxes don't guarantee cleanliness. */
function shellQuote(s) {
	// Each embedded quote becomes: close quote, escaped literal quote, reopen.
	const escaped = s.split("'").join("'\\''");
	return "'" + escaped + "'";
}
|
|
1461
|
+
/**
|
|
1462
|
+
* Extract an npm-packed gzipped tarball.
|
|
1463
|
+
*
|
|
1464
|
+
* ## The problem this works around
|
|
1465
|
+
*
|
|
1466
|
+
* Some tarballs (openclaw's among them — they're not packed by vanilla
|
|
1467
|
+
* `npm pack`) include relative symlinks inside nested .bin/ dirs whose
|
|
1468
|
+
* targets contain `..`, e.g.
|
|
1469
|
+
* node_modules/<pkg>/node_modules/.bin/foo -> ../foo/bin/cli.js
|
|
1470
|
+
*
|
|
1471
|
+
* GNU tar classifies any symlink target with `..` or a leading `/` as
|
|
1472
|
+
* "dangerous" and defers its extraction to a post-files pass, while also
|
|
1473
|
+
* needing a post-files pass to restore directory permissions/mtimes. The
|
|
1474
|
+
* two passes race: the deferred-symlink handling mutates parent-dir inodes,
|
|
1475
|
+
* then the directory stat-restore pass does `fstatat()` and the recorded
|
|
1476
|
+
* inode doesn't match, firing
|
|
1477
|
+
*
|
|
1478
|
+
* tar: <path>: Directory renamed before its status could be extracted
|
|
1479
|
+
*
|
|
1480
|
+
* from `apply_nonancestor_delayed_set_stat()` in extract.c. This is an
|
|
1481
|
+
* `ERROR` (hard-fail, exit 2) — the `--warning=no-rename-directory`
|
|
1482
|
+
* keyword controls a different, incremental-archive code path and does
|
|
1483
|
+
* NOT silence this. Reference: Paul Eggert, bug-tar 2004-04:
|
|
1484
|
+
* https://lists.gnu.org/archive/html/bug-tar/2004-04/msg00021.html
|
|
1485
|
+
*
|
|
1486
|
+
* ## The fix
|
|
1487
|
+
*
|
|
1488
|
+
* Pass `--absolute-names` (aka `-P`). Per GNU tar docs, this disables the
|
|
1489
|
+
* "normalize dangerous names" logic — including the deferred-symlink pass
|
|
1490
|
+
* that's racing us. Also stops stripping leading `/`, but our tarballs
|
|
1491
|
+
* only contain relative (`./node_modules/...`) paths so there's nothing
|
|
1492
|
+
* to strip. Safe because:
|
|
1493
|
+
* - The tarball is sha512-verified upstream (downloadWithCache)
|
|
1494
|
+
* - All entry paths are relative, no absolute-path escape risk
|
|
1495
|
+
* - All dangerous symlink targets resolve within the extracted tree
|
|
1496
|
+
*
|
|
1497
|
+
* ## Belt-and-suspenders
|
|
1498
|
+
*
|
|
1499
|
+
* If some tar variant still emits the error despite -P, we fall through
|
|
1500
|
+
* to checking the stderr pattern: if every error line is the benign
|
|
1501
|
+
* "Directory renamed …" text (no real failures like ENOSPC/EACCES/gzip
|
|
1502
|
+
* CRC/etc.), swallow exit 2. Callers MUST still verify extraction
|
|
1503
|
+
* (e.g. `fs.existsSync(path.join(dest, 'package.json'))`) — tar's
|
|
1504
|
+
* `skip_this_one = 1` after the error means some dirs missed their
|
|
1505
|
+
* mtime/mode restore, but content was written.
|
|
1506
|
+
*/
|
|
1507
|
+
function extractTarballTolerant(tarball, destDir, opts = {}) {
	// Build the tar invocation; -P disables GNU tar's "dangerous name"
	// handling (see the rationale in the block comment above).
	const strip = opts.stripComponents ?? 0;
	let cmd = `tar -xzf ${shellQuote(tarball)} -C ${shellQuote(destDir)}`;
	if (strip > 0) cmd += ` --strip-components=${strip}`;
	cmd += " -P";
	try {
		execCaptureErr(cmd);
	} catch (e) {
		const msg = e?.message ?? "";
		// Only the known-benign "Directory renamed" diagnostic may be tolerated.
		if (!msg.includes("Directory renamed before its status could be extracted")) throw e;
		// ...and only when no genuinely fatal condition is also present.
		const fatalPatterns = [
			/Cannot open/i,
			/Cannot mkdir/i,
			/Cannot create/i,
			/No space left on device/i,
			/Disk quota exceeded/i,
			/Permission denied/i,
			/Read-only file system/i,
			/unrecognized option/i,
			/gzip:/i,
			/Unexpected EOF/i,
			/Invalid argument/i
		];
		for (const pattern of fatalPatterns) if (pattern.test(msg)) throw e;
		console.error(`[tar] -P did not suppress "Directory renamed" on ${tarball}; tolerating (content must be verified by caller)`);
	}
}
|
|
1028
1534
|
//#endregion
|
|
@@ -1034,7 +1540,9 @@ function runBackup(input) {
|
|
|
1034
1540
|
*/
|
|
1035
1541
|
function startAsyncReset(ctxBase64) {
|
|
1036
1542
|
const taskId = (0, node_crypto.randomUUID)();
|
|
1037
|
-
const resultFile =
|
|
1543
|
+
const resultFile = resetResultFile(taskId);
|
|
1544
|
+
const log = makeLogger(resetLogFile(taskId));
|
|
1545
|
+
log(`=== startAsyncReset spawning worker for taskId=${taskId} ===`);
|
|
1038
1546
|
const initial = {
|
|
1039
1547
|
status: "running",
|
|
1040
1548
|
step: 0,
|
|
@@ -1046,7 +1554,7 @@ function startAsyncReset(ctxBase64) {
|
|
|
1046
1554
|
const dir = node_path.default.dirname(resultFile);
|
|
1047
1555
|
if (!node_fs.default.existsSync(dir)) node_fs.default.mkdirSync(dir, { recursive: true });
|
|
1048
1556
|
node_fs.default.writeFileSync(tmpPath, JSON.stringify(initial), "utf-8");
|
|
1049
|
-
|
|
1557
|
+
moveSafe(tmpPath, resultFile);
|
|
1050
1558
|
const child = (0, node_child_process.spawn)(process.execPath, [
|
|
1051
1559
|
process.argv[1],
|
|
1052
1560
|
"reset",
|
|
@@ -1058,6 +1566,7 @@ function startAsyncReset(ctxBase64) {
|
|
|
1058
1566
|
stdio: "ignore"
|
|
1059
1567
|
});
|
|
1060
1568
|
child.on("error", (err) => {
|
|
1569
|
+
log(`FATAL worker failed to start: ${err.message}`);
|
|
1061
1570
|
const failResult = {
|
|
1062
1571
|
status: "failed",
|
|
1063
1572
|
step: 0,
|
|
@@ -1069,52 +1578,295 @@ function startAsyncReset(ctxBase64) {
|
|
|
1069
1578
|
};
|
|
1070
1579
|
const errTmpPath = resultFile + ".tmp";
|
|
1071
1580
|
node_fs.default.writeFileSync(errTmpPath, JSON.stringify(failResult));
|
|
1072
|
-
|
|
1581
|
+
moveSafe(errTmpPath, resultFile);
|
|
1073
1582
|
});
|
|
1074
1583
|
child.unref();
|
|
1584
|
+
log(`spawned worker pid=${child.pid}`);
|
|
1075
1585
|
return { taskId };
|
|
1076
1586
|
}
|
|
1077
1587
|
//#endregion
|
|
1588
|
+
//#region src/oss/fetchManifest.ts
const MANIFEST_PREFIX = "builtin/manifests/openclaw/recommended/";
const MANIFEST_SUFFIX = ".json";
/**
 * Fetch and parse the recommended-versions manifest for `tag`.
 *
 * @param ossFileMap map of OSS object key -> signed download URL
 * @param tag openclaw version tag (forms the manifest key)
 * @throws when no signed URL exists for the tag (error lists available tags)
 *         or when the HTTP fetch fails
 */
async function fetchManifest(ossFileMap, tag) {
	const key = `${MANIFEST_PREFIX}${tag}${MANIFEST_SUFFIX}`;
	const url = ossFileMap[key];
	if (!url) {
		// Derive each available tag from its key using the prefix/suffix lengths
		// (not hard-coded offsets) so a prefix change can't corrupt the listing.
		const available = Object.keys(ossFileMap).filter((k) => k.startsWith(MANIFEST_PREFIX) && k.endsWith(MANIFEST_SUFFIX)).map((k) => k.slice(MANIFEST_PREFIX.length, -MANIFEST_SUFFIX.length));
		const availStr = available.length ? available.join(", ") : "(none)";
		throw new Error(`manifest signed URL missing for tag "${tag}" (key ${key}). Available tags in ossFileMap: ${availStr}. Either pass an available tag or update the studio_server TCC openclaw_upgrade_config supported_versions.`);
	}
	const res = await fetch(url);
	if (!res.ok) throw new Error(`fetch manifest failed: HTTP ${res.status} ${res.statusText}`);
	return await res.json();
}
|
|
1603
|
+
/**
 * Download `pkg` from its signed URL into a content-addressed cache,
 * verifying sha512 integrity while streaming. Returns the cached file path.
 *
 * Cache layout: <cacheRoot>/<first 16 hex of shasum>/<basename(ossKey)>.
 * A cache hit returns immediately without re-verifying (the file was
 * verified when first written). The download streams through a tee that
 * hashes every chunk, then the tmp file is atomically renamed into place.
 */
async function downloadWithCache(pkg, ossFileMap, opts = {}) {
	const cacheRoot = opts.cacheRoot ?? "/tmp/openclaw-diagnose/resources";
	const shortHash = pkg.shasum.slice(0, 16);
	const destDir = node_path.default.join(cacheRoot, shortHash);
	const destFile = node_path.default.join(destDir, node_path.default.posix.basename(pkg.ossKey));
	node_fs.default.mkdirSync(destDir, { recursive: true });
	// Cache hit: the shasum-keyed directory makes collisions effectively impossible.
	if (node_fs.default.existsSync(destFile)) return destFile;
	const url = ossFileMap[pkg.ossKey];
	if (!url) throw new Error(`signed URL missing for ${pkg.ossKey}`);
	if (!pkg.integrity.startsWith("sha512-")) throw new Error(`unsupported integrity format: ${pkg.integrity}`);
	const expected = pkg.integrity.slice(7);
	// Per-process random tmp name so concurrent downloads can't clobber each other.
	const tmpFile = node_path.default.join(destDir, `.tmp.${process.pid}.${node_crypto.default.randomBytes(4).toString("hex")}`);
	try {
		const res = await fetch(url);
		if (!res.ok) throw new Error(`download failed: HTTP ${res.status}`);
		if (!res.body) throw new Error(`download failed: empty body for ${pkg.ossKey}`);
		const hasher = node_crypto.default.createHash("sha512");
		const source = node_stream.Readable.fromWeb(res.body);
		// Tee: hash each chunk as it flows to disk, avoiding a second read pass.
		async function* teeAndHash(src) {
			for await (const chunk of src) {
				hasher.update(chunk);
				yield chunk;
			}
		}
		await (0, node_stream_promises.pipeline)(source, teeAndHash, node_fs.default.createWriteStream(tmpFile));
		const actual = hasher.digest("base64");
		if (actual !== expected) {
			// Debug-only escape hatch; loudly logged, never for production use.
			const envBypass = process.env.OPENCLAW_DEBUG_SKIP_INTEGRITY === "1";
			if (opts.skipIntegrity || envBypass) {
				const sourceLabel = opts.skipIntegrity ? "skipIntegrity=true" : "OPENCLAW_DEBUG_SKIP_INTEGRITY=1";
				console.error(`⚠ [downloadWithCache] INTEGRITY BYPASS for ${pkg.ossKey}: expected ${expected.slice(0, 12)}… got ${actual.slice(0, 12)}… — ${sourceLabel}. DO NOT use this flag in production.`);
			} else throw new Error(`integrity mismatch for ${pkg.ossKey}: expected ${expected} got ${actual}`);
		}
		// Publish atomically so a cache hit never observes a partial file.
		moveSafe(tmpFile, destFile);
		return destFile;
	} catch (e) {
		// Best-effort tmp cleanup; the original error is what matters.
		try {
			node_fs.default.unlinkSync(tmpFile);
		} catch {}
		throw e;
	}
}
|
|
1645
|
+
/**
 * Install the openclaw CLI at `openclawTag` from its OSS tarball.
 *
 * Staged swap with rollback:
 *   1. extract into a fresh mkdtemp staging dir, sanity-check package.json
 *   2. stage -> targetDir.new, current targetDir -> targetDir.bak
 *   3. targetDir.new -> targetDir; on failure restore .bak, clean .new, rethrow
 *   4. on success drop the .bak
 * Pre-existing .new/.bak leftovers from interrupted runs are removed first so
 * the renames cannot fail on non-empty destinations.
 */
async function installOpenclaw(openclawTag, ossFileMap, opts = {}) {
	const homeBase = opts.homeBase ?? "/home/gem";
	const t0 = Date.now();
	const pkg = (await fetchManifest(ossFileMap, openclawTag)).packages.find((p) => p.role === "cli" && p.name === "openclaw");
	if (!pkg) throw new Error("install-openclaw: role=cli,name=openclaw not found in manifest");
	const targetDir = opts.targetDir ?? node_path.default.join(homeBase, pkg.installPath);
	const bakDir = targetDir + ".bak";
	const newDir = targetDir + ".new";
	const tarball = await downloadWithCache(pkg, ossFileMap, opts);
	console.error(`[install-openclaw] tag=${openclawTag} shasum=${pkg.shasum.slice(0, 12)}...`);
	// Remove leftovers from any previously interrupted install.
	if (node_fs.default.existsSync(newDir)) node_fs.default.rmSync(newDir, {
		recursive: true,
		force: true
	});
	if (node_fs.default.existsSync(bakDir)) node_fs.default.rmSync(bakDir, {
		recursive: true,
		force: true
	});
	node_fs.default.mkdirSync(node_path.default.dirname(targetDir), { recursive: true });
	const tmpStage = node_fs.default.mkdtempSync(node_path.default.join(opts.tmpRoot ?? node_os.default.tmpdir(), "openclaw-install-"));
	try {
		extractTarballTolerant(tarball, tmpStage, { stripComponents: 1 });
		// extractTarballTolerant may swallow benign tar warnings — verify content.
		if (!node_fs.default.existsSync(node_path.default.join(tmpStage, "package.json"))) throw new Error("extracted tarball missing package.json");
		moveSafe(tmpStage, newDir);
		const hadExisting = node_fs.default.existsSync(targetDir);
		try {
			if (hadExisting) moveSafe(targetDir, bakDir);
			moveSafe(newDir, targetDir);
		} catch (e) {
			// Roll back: restore the previous install if the swap left targetDir missing.
			if (hadExisting && !node_fs.default.existsSync(targetDir) && node_fs.default.existsSync(bakDir)) try {
				moveSafe(bakDir, targetDir);
			} catch {}
			try {
				node_fs.default.rmSync(newDir, {
					recursive: true,
					force: true
				});
			} catch {}
			throw e;
		}
		// Success: the backup of the replaced install is no longer needed.
		if (hadExisting && node_fs.default.existsSync(bakDir)) node_fs.default.rmSync(bakDir, {
			recursive: true,
			force: true
		});
	} finally {
		// tmpStage still exists only when moveSafe(tmpStage, newDir) never ran.
		if (node_fs.default.existsSync(tmpStage)) try {
			node_fs.default.rmSync(tmpStage, {
				recursive: true,
				force: true
			});
		} catch {}
	}
	console.error(`[install-openclaw] done in ${Date.now() - t0}ms`);
}
|
|
1699
|
+
/**
 * Install one or more extensions at `tag` from their OSS tarballs.
 *
 * Exactly one of opts.all / opts.names must be given. Downloads run in
 * parallel; installs run sequentially (each installOne does a directory
 * swap). Unless opts.skipConfigUpdate is set, plugins.installs in
 * openclaw.json is updated afterwards via updatePluginInstalls.
 *
 * @throws on flag misuse, or when a requested name/packageName is absent
 *         from the manifest's extension packages.
 */
async function installExtension(tag, ossFileMap, opts = {}) {
	const homeBase = opts.homeBase ?? "/home/gem";
	const hasAll = !!opts.all;
	const hasNames = (opts.names?.length ?? 0) > 0;
	if (hasAll && hasNames) throw new Error("install-extension: --all and --extension are mutually exclusive");
	if (!hasAll && !hasNames) throw new Error("install-extension: must provide --all or --extension=<name>");
	const allExts = (await fetchManifest(ossFileMap, tag)).packages.filter((p) => p.role === "extension");
	let targets;
	if (hasAll) targets = allExts;
	else {
		// A requested name may match either the short name or the packageName.
		const wanted = new Set(opts.names);
		targets = allExts.filter((p) => wanted.has(p.name) || p.packageName != null && wanted.has(p.packageName));
		const foundKeys = /* @__PURE__ */ new Set();
		for (const t of targets) {
			foundKeys.add(t.name);
			if (t.packageName) foundKeys.add(t.packageName);
		}
		// Fail loudly listing every unmatched request, not just the first.
		const missing = opts.names.filter((n) => !foundKeys.has(n));
		if (missing.length > 0) throw new Error(`install-extension: not found in manifest: ${missing.join(", ")}`);
	}
	console.error(`[install-extension] tag=${tag} targets=${targets.length}`);
	const t0 = Date.now();
	// Download all tarballs in parallel (cache-backed, integrity-checked).
	const tarballs = await Promise.all(targets.map(async (p) => {
		const tb = await downloadWithCache(p, ossFileMap, opts);
		console.error(`[install-extension] ${p.name}: downloaded`);
		return {
			pkg: p,
			tarball: tb
		};
	}));
	// Install sequentially: each installOne performs filesystem swaps.
	for (const { pkg, tarball } of tarballs) {
		installOne(pkg, tarball, homeBase);
		console.error(`[install-extension] ${pkg.name}: installed`);
	}
	if (!opts.skipConfigUpdate) updatePluginInstalls(opts.configPath ?? node_path.default.join(homeBase, "workspace/agent/openclaw.json"), targets);
	else console.error(`[install-extension] skipConfigUpdate=true — not touching openclaw.json`);
	console.error(`[install-extension] done ${targets.length}/${targets.length} in ${Date.now() - t0}ms`);
}
|
|
1737
|
+
/**
 * Merge each installed extension's installMetadata into openclaw.json's
 * plugins.installs[<pkg.name>]. Atomic write via tmp + rename.
 *
 * - No openclaw.json → log + return (not an error; some install contexts don't have it yet)
 * - Extension without installMetadata in manifest → skip that entry (log)
 * - Existing plugins.installs entries for other extensions left untouched
 */
function updatePluginInstalls(configPath, installedPkgs) {
	if (!node_fs.default.existsSync(configPath)) {
		console.error(`[install-extension] no config at ${configPath} — skip plugins.installs update`);
		return;
	}
	const JSON5 = loadJSON5();
	const parsed = JSON5.parse(node_fs.default.readFileSync(configPath, "utf-8"));
	// Ensure plugins.installs exists as a plain object before merging into it.
	if (!parsed.plugins || typeof parsed.plugins !== "object") parsed.plugins = {};
	const pluginsSection = parsed.plugins;
	if (!pluginsSection.installs || typeof pluginsSection.installs !== "object") pluginsSection.installs = {};
	const installMap = pluginsSection.installs;
	let written = 0;
	let withoutMeta = 0;
	for (const extension of installedPkgs) {
		if (extension.installMetadata) {
			installMap[extension.name] = extension.installMetadata;
			written++;
		} else {
			withoutMeta++;
		}
	}
	// Atomic write: serialize to a sibling tmp file, then rename into place.
	const stagedPath = configPath + ".installs-tmp";
	node_fs.default.writeFileSync(stagedPath, JSON.stringify(parsed, null, 2), "utf-8");
	moveSafe(stagedPath, configPath);
	const skippedNote = withoutMeta > 0 ? ` (${withoutMeta} package(s) without installMetadata skipped)` : "";
	console.error(`[install-extension] plugins.installs updated: ${written} entry(ies) in ${configPath}` + skippedNote);
}
|
|
1768
|
+
/**
 * Swap one extension's tarball content into homeBase + pkg.installPath.
 *
 * Staged swap mirroring installOpenclaw: extract into destDir.new, move the
 * current destDir aside as destDir.old, promote .new, then drop .old. Stale
 * .new/.old leftovers are removed up front (renaming a directory onto an
 * existing non-empty one fails with ENOTEMPTY), and a failed swap restores
 * the previous version before rethrowing.
 */
function installOne(pkg, tarball, homeBase) {
	const destDir = node_path.default.join(homeBase, pkg.installPath);
	const stagingDir = destDir + ".new";
	const oldDir = destDir + ".old";
	node_fs.default.mkdirSync(node_path.default.dirname(destDir), { recursive: true });
	// Remove leftovers from any previously interrupted install.
	if (node_fs.default.existsSync(stagingDir)) node_fs.default.rmSync(stagingDir, {
		recursive: true,
		force: true
	});
	if (node_fs.default.existsSync(oldDir)) node_fs.default.rmSync(oldDir, {
		recursive: true,
		force: true
	});
	node_fs.default.mkdirSync(stagingDir);
	try {
		extractTarballTolerant(tarball, stagingDir, { stripComponents: 1 });
		// extractTarballTolerant may swallow benign tar warnings — verify content.
		if (!node_fs.default.existsSync(node_path.default.join(stagingDir, "package.json"))) throw new Error(`extension tarball missing package.json: ${pkg.name}`);
	} catch (e) {
		try {
			node_fs.default.rmSync(stagingDir, {
				recursive: true,
				force: true
			});
		} catch {}
		throw e;
	}
	const hadOld = node_fs.default.existsSync(destDir);
	try {
		if (hadOld) moveSafe(destDir, oldDir);
		moveSafe(stagingDir, destDir);
	} catch (e) {
		// Roll back: restore the previous version if the swap left destDir
		// missing (same recovery shape as installOpenclaw).
		if (hadOld && !node_fs.default.existsSync(destDir) && node_fs.default.existsSync(oldDir)) try {
			moveSafe(oldDir, destDir);
		} catch {}
		try {
			node_fs.default.rmSync(stagingDir, {
				recursive: true,
				force: true
			});
		} catch {}
		throw e;
	}
	if (hadOld && node_fs.default.existsSync(oldDir)) node_fs.default.rmSync(oldDir, {
		recursive: true,
		force: true
	});
}
|
|
1798
|
+
/**
 * Download + extract a config/template package to its install destination.
 *
 * Current manifest has all resources as format=tgz with content at the root
 * (config: openclaw.json file at root; template: scripts/ dir at root), so we
 * always `tar -xzf` without --strip-components into `dirname(fullInstallPath)`.
 * The final artefact ends up at exactly `homeBase + pkg.installPath`.
 */
async function downloadResource(tag, ossFileMap, opts) {
	const homeBase = opts.homeBase ?? "/home/gem";
	const manifest = await fetchManifest(ossFileMap, tag);
	const pkg = manifest.packages.find((p) => p.role === opts.role && p.name === opts.name);
	if (!pkg) throw new Error(`download-resource: not found in manifest: role=${opts.role} name=${opts.name}`);
	const file = await downloadWithCache(pkg, ossFileMap, opts);
	const fullInstallPath = node_path.default.join(homeBase, pkg.installPath);
	const extractDir = opts.dir ?? node_path.default.dirname(fullInstallPath);
	node_fs.default.mkdirSync(extractDir, { recursive: true });
	// A resource counts as a tarball if the manifest declares it or the OSS
	// key looks like one.
	const declaredFormat = (pkg.format ?? "").toLowerCase();
	const keyLower = pkg.ossKey.toLowerCase();
	const isTarball = declaredFormat === "tgz" || keyLower.endsWith(".tgz") || keyLower.endsWith(".tar.gz");
	if (isTarball) {
		extractTarballTolerant(file, extractDir);
		console.error(`[download-resource] ${opts.role}/${opts.name}: extracted to ${extractDir}`);
		return;
	}
	// Plain file: copy under its original basename.
	const basename = node_path.default.posix.basename(pkg.ossKey);
	node_fs.default.copyFileSync(file, node_path.default.join(extractDir, basename));
	console.error(`[download-resource] ${opts.role}/${opts.name}: copied ${basename} to ${extractDir}`);
}
|
|
1825
|
+
//#endregion
|
|
1826
|
+
//#region src/oss/getOpenclawTag.ts
|
|
1827
|
+
/**
 * Extracts the openclaw tag from the manifest key present in ossFileMap.
 * Avoids passing an extra ctx field — we already know the tag from the
 * well-known manifest key studio_server included.
 *
 * Manifest key shape: builtin/manifests/openclaw/recommended/<tag>.json
 *
 * @throws {Error} when no key in ossFileMap matches the manifest key shape.
 */
function getOpenclawTagFromOssFileMap(ossFileMap) {
	const prefix = "builtin/manifests/openclaw/recommended/";
	const suffix = ".json";
	// Slice by prefix/suffix length instead of hard-coded offsets (39/-5) so
	// a future prefix change cannot silently corrupt the extracted tag.
	for (const key of Object.keys(ossFileMap)) if (key.startsWith(prefix) && key.endsWith(suffix)) return key.slice(prefix.length, -suffix.length);
	throw new Error("cannot resolve openclaw tag: ossFileMap missing manifest key");
}
|
|
1840
|
+
//#endregion
|
|
1078
1841
|
//#region src/reset.ts
|
|
1079
1842
|
const STEPS = [
|
|
1080
1843
|
"备份当前配置",
|
|
1081
|
-
"下载技术栈模板",
|
|
1082
1844
|
"生成默认配置",
|
|
1083
1845
|
"杀掉 openclaw 进程",
|
|
1084
|
-
"
|
|
1846
|
+
"等待沙箱初始化完成",
|
|
1847
|
+
"确认 openclaw 版本",
|
|
1085
1848
|
"合并核心备份配置",
|
|
1086
|
-
"
|
|
1087
|
-
"
|
|
1849
|
+
"检查启动脚本",
|
|
1850
|
+
"安装扩展",
|
|
1088
1851
|
"启动并验证"
|
|
1089
1852
|
];
|
|
1090
1853
|
const TOTAL_STEPS = STEPS.length;
|
|
1091
|
-
const CORE_BACKUP_PATH = "/home/gem/workspace/.force/openclaw/core-backup.json";
|
|
1092
|
-
const TMP_RESET_DIR = "/tmp/openclaw-reset";
|
|
1093
|
-
/**
|
|
1094
|
-
* Atomically write the result file (write .tmp + rename).
|
|
1095
|
-
*/
|
|
1096
1854
|
/** Atomically persist `result` as JSON: write `<file>.tmp`, then rename over. */
function writeResultFile(resultFile, result) {
	const parentDir = node_path.default.dirname(resultFile);
	if (!node_fs.default.existsSync(parentDir)) node_fs.default.mkdirSync(parentDir, { recursive: true });
	const stagingPath = resultFile + ".tmp";
	const payload = JSON.stringify(result);
	node_fs.default.writeFileSync(stagingPath, payload, "utf-8");
	moveSafe(stagingPath, resultFile);
}
|
|
1103
|
-
|
|
1104
|
-
* Update progress in the result file.
|
|
1105
|
-
*/
|
|
1106
|
-
function updateProgress(resultFile, step, progress, startedAt) {
|
|
1861
|
+
/** Record running progress for a 1-based `step` into the result file. */
function updateProgress(resultFile, step, startedAt) {
	const snapshot = {
		status: "running",
		step,
		totalSteps: TOTAL_STEPS,
		progress: STEPS[step - 1],
		startedAt
	};
	writeResultFile(resultFile, snapshot);
}
|
|
1115
|
-
/**
|
|
1116
|
-
* Mark the task as done.
|
|
1117
|
-
*/
|
|
1118
1870
|
function markDone(resultFile, startedAt) {
|
|
1119
1871
|
writeResultFile(resultFile, {
|
|
1120
1872
|
status: "done",
|
|
@@ -1125,146 +1877,361 @@ function markDone(resultFile, startedAt) {
|
|
|
1125
1877
|
completedAt: (/* @__PURE__ */ new Date()).toISOString()
|
|
1126
1878
|
});
|
|
1127
1879
|
}
|
|
1128
|
-
|
|
1129
|
-
* Mark the task as failed.
|
|
1130
|
-
*/
|
|
1131
|
-
function markFailed(resultFile, step, progress, error, startedAt) {
|
|
1880
|
+
/** Record terminal failure; step 0 means we failed before any step started. */
function markFailed(resultFile, step, error, startedAt) {
	const progressLabel = step > 0 ? STEPS[step - 1] : "初始化";
	writeResultFile(resultFile, {
		status: "failed",
		step,
		totalSteps: TOTAL_STEPS,
		progress: progressLabel,
		error,
		startedAt,
		completedAt: new Date().toISOString()
	});
}
|
|
1142
1891
|
/**
 * Download the template assets (config/openclaw.json + template/scripts) from
 * OSS into a scratch directory so the existing step 2 (generateDefaultConfig)
 * and step 7 (copyStartupScripts) can consume them as local files — the rest
 * of the orchestrator code stays untouched.
 *
 * Called once before step 1. The caller is responsible for rm -rf'ing
 * stagedDir in a finally{} block after the reset completes (or fails).
 */
async function stageTemplate(openclawTag, ossFileMap, stagedDir, configDir, log) {
	// Start from a clean scratch directory.
	if (node_fs.default.existsSync(stagedDir)) node_fs.default.rmSync(stagedDir, {
		recursive: true,
		force: true
	});
	node_fs.default.mkdirSync(stagedDir, { recursive: true });
	// Sequential on purpose: config first, then scripts, preserving ordering.
	const downloads = [
		{ role: "config", name: "openclaw.json", dir: stagedDir },
		{ role: "template", name: "scripts", dir: configDir }
	];
	for (const spec of downloads) await downloadResource(openclawTag, ossFileMap, spec);
	log(`staged openclaw.json to ${stagedDir}, scripts directly to ${configDir}/scripts`);
}
|
|
1918
|
+
/** Step 1: Copy the current config aside as openclaw.json.bak.<N+1>. */
function backupCurrentConfig(configPath, log) {
	if (!fileExists(configPath)) {
		log("no existing config, skip backup");
		return;
	}
	// Find the highest existing .bak.N so a new backup never clobbers one.
	let highest = 0;
	try {
		const siblings = node_fs.default.readdirSync(node_path.default.dirname(configPath));
		for (const entry of siblings) {
			const m = /^openclaw\.json\.bak\.(\d+)$/.exec(entry);
			if (m === null) continue;
			const n = parseInt(m[1], 10);
			if (n > highest) highest = n;
		}
	} catch {}
	const bakPath = configPath + ".bak." + (highest + 1);
	node_fs.default.copyFileSync(configPath, bakPath);
	log(`backed up to ${bakPath}`);
}
|
|
1939
|
+
/** Step 2: Replace $$__XXX__ placeholders and write default config. */
function generateDefaultConfig(srcDir, configPath, templateVars, log) {
	const templatePath = node_path.default.join(srcDir, "openclaw.json");
	if (!fileExists(templatePath)) throw new Error("staged openclaw.json not found at " + templatePath);
	let body = node_fs.default.readFileSync(templatePath, "utf-8");
	let substitutions = 0;
	// split/join replaces every literal occurrence without regex-escaping issues.
	for (const [placeholder, value] of Object.entries(templateVars)) {
		const pieces = body.split(placeholder);
		substitutions += pieces.length - 1;
		body = pieces.join(value);
	}
	node_fs.default.writeFileSync(configPath, body, "utf-8");
	log(`wrote ${configPath} (${substitutions} placeholder(s) replaced, ${Object.keys(templateVars).length} provided)`);
}
|
|
1953
|
+
/** Step 3: Kill all openclaw processes, then give them a moment to exit. */
function killOpenclawProcesses(log) {
	// pkill exits non-zero when nothing matched; both the `|| true` and the
	// empty catch keep that from aborting the reset.
	try {
		shell("pkill -f openclaw-gateway || true", 5e3);
	} catch {}
	// Settle before later steps restart anything.
	shell("sleep 2", 5e3);
	log("killed openclaw-gateway processes");
}
|
|
1961
|
+
/**
 * Step 4: Wait for the sandbox's own init (init_sandbox.sh / concurrent npm
 * install) to finish before we start our own work. Two processes sharing
 * ~/.npm cache + competing for disk/network just makes everything crawl;
 * letting init finish first is the cleanest way to get exclusive access.
 * Polls every 10s up to `maxWaitMs`. If the deadline is hit we fall through
 * anyway — better to try than to fail the reset outright.
 *
 * Kept even after we switched off `npm install` because the sandbox init
 * script still runs `npm install` for other packages and holds cache locks.
 */
function waitForInitNpm(maxWaitMs, log) {
	const giveUpAt = Date.now() + maxWaitMs;
	const ownPid = String(process.pid);
	let pollCount = 0;
	while (Date.now() < giveUpAt) {
		pollCount++;
		let concurrent;
		try {
			const out = shell(`pgrep -af "init_sandbox.sh|npm install|npm i " | grep -v -- "${ownPid}" | wc -l`, 1e4);
			concurrent = parseInt(out.trim(), 10) || 0;
		} catch {
			// A failing pipeline means there is nothing to wait for.
			concurrent = 0;
		}
		if (concurrent === 0) {
			log(`poll ${pollCount}: no concurrent npm, proceeding`);
			return;
		}
		log(`poll ${pollCount}: ${concurrent} concurrent npm/init process(es) still running, waiting 10s`);
		try {
			shell("sleep 10", 12e3);
		} catch {}
	}
	log(`deadline (${maxWaitMs}ms) hit after ${pollCount} poll(s), proceeding anyway`);
}
|
|
1997
|
+
/**
 * Step 5: Install openclaw from the OSS-provided tarball at the target tag,
 * then verify `openclaw --version` output contains that tag. No npm involved.
 */
async function step5InstallOpenclaw(openclawTag, ossFileMap, log) {
	log(`install-openclaw tag=${openclawTag}`);
	await installOpenclaw(openclawTag, ossFileMap);
	// `|| true` keeps shell() from throwing; a bad install surfaces as a
	// version string that fails the check below instead.
	const reported = shell("openclaw --version 2>&1 || true", 1e4).trim();
	if (!reported.includes(openclawTag)) throw new Error(`openclaw version verify failed: got "${reported}"`);
	log(`openclaw version verified: ${reported}`);
}
|
|
2008
|
+
/**
 * Step 6: Merge coreBackup from resetData + ensure allowedOrigins.
 *
 * Two independent passes over configPath (JSON5-parsed, JSON-written):
 *   1. If resetData.coreBackup exists: re-add backed-up agents (enriched with
 *      name/workspace/agentDir), force the "main" agent into team mode,
 *      derive a default feishu account from top-level feishu policy fields,
 *      and restore bindings + backed-up feishu accounts.
 *   2. If vars.expectedOrigins is non-empty: union those origins into
 *      gateway.controlUi.allowedOrigins (skipped entirely when "*" present).
 */
function mergeCoreBackupAndOrigins(configPath, vars, resetData, log) {
	const JSON5 = loadJSON5();
	const backup = resetData.coreBackup;
	if (backup) {
		const config = JSON5.parse(node_fs.default.readFileSync(configPath, "utf-8"));
		// Human-readable record of what got merged, for the log line below.
		const merged = [];
		if (backup.agents && backup.agents.length > 0) {
			if (!config.agents) config.agents = {};
			const agents = config.agents;
			if (!Array.isArray(agents.list)) agents.list = [];
			const configDir = node_path.default.dirname(configPath);
			// Re-add each backed-up agent, deriving agentDir from the config dir.
			for (const agent of backup.agents) {
				const enriched = {
					id: agent.id,
					name: agent.id,
					workspace: agent.workspace,
					agentDir: configDir + "/agents/" + agent.id + "/agent"
				};
				agents.list.push(enriched);
			}
			merged.push(`agents(+${backup.agents.length})`);
			const list = agents.list;
			// Ensure a "main" agent exists, make it the default, and allow it to
			// spawn any subagent ("team mode").
			let mainIdx = list.findIndex((a) => a.id === "main");
			if (mainIdx < 0) {
				list.unshift({ id: "main" });
				mainIdx = 0;
			}
			list[mainIdx].subagents = { allowAgents: ["*"] };
			list[mainIdx].default = true;
			merged.push("main-team-mode");
			// Lift top-level feishu policy fields into accounts.default so the
			// per-account model sees the same policies.
			const feishu = config.channels?.feishu;
			if (feishu) {
				if (!feishu.accounts) feishu.accounts = {};
				const accounts = feishu.accounts;
				const defaultAccount = {};
				for (const key of [
					"dmPolicy",
					"allowFrom",
					"groupPolicy",
					"groupAllowFrom"
				]) if (feishu[key] !== void 0) defaultAccount[key] = feishu[key];
				if (Object.keys(defaultAccount).length > 0) {
					accounts.default = defaultAccount;
					merged.push("accounts.default");
				}
			}
		}
		if (backup.bindings && backup.bindings.length > 0) {
			// Bindings are replaced wholesale, not merged.
			config.bindings = backup.bindings;
			merged.push("bindings");
		}
		const backupAccounts = backup.channels?.feishu?.accounts;
		if (backupAccounts && Object.keys(backupAccounts).length > 0) {
			// Restore backed-up feishu accounts on top of whatever exists.
			if (!config.channels) config.channels = {};
			const ch = config.channels;
			if (!ch.feishu) ch.feishu = {};
			const feishu = ch.feishu;
			if (!feishu.accounts) feishu.accounts = {};
			Object.assign(feishu.accounts, backupAccounts);
			merged.push("channels.feishu.accounts");
		}
		node_fs.default.writeFileSync(configPath, JSON.stringify(config, null, 2), "utf-8");
		log(`merged from coreBackup: [${merged.join(", ") || "nothing"}]`);
	} else log("no coreBackup in resetData, skip multi-agent merge");
	const expectedOrigins = Array.isArray(vars.expectedOrigins) ? vars.expectedOrigins : [];
	if (expectedOrigins.length === 0) {
		log("no expectedOrigins provided");
		return;
	}
	// Re-read: the coreBackup pass above may have rewritten the file.
	const config = JSON5.parse(node_fs.default.readFileSync(configPath, "utf-8"));
	if (!config.gateway) config.gateway = {};
	const gw = config.gateway;
	if (!gw.controlUi) gw.controlUi = {};
	const cui = gw.controlUi;
	// Drop any non-string entries before merging.
	const current = Array.isArray(cui.allowedOrigins) ? cui.allowedOrigins.filter((o) => typeof o === "string") : [];
	if (current.includes("*")) {
		log("allowedOrigins already contains \"*\", skip origin merge");
		return;
	}
	// Order-preserving de-duplicating union of current + expected origins.
	const seen = new Set(current);
	const added = [];
	const mergedOrigins = [...current];
	for (const o of expectedOrigins) if (!seen.has(o)) {
		mergedOrigins.push(o);
		seen.add(o);
		added.push(o);
	}
	cui.allowedOrigins = mergedOrigins;
	node_fs.default.writeFileSync(configPath, JSON.stringify(config, null, 2), "utf-8");
	log(`allowedOrigins: added ${added.length} (${JSON.stringify(added)}), total now ${mergedOrigins.length}`);
}
|
|
2100
|
+
/**
|
|
2101
|
+
* Step 7: Verify startup scripts landed in configDir/scripts/.
|
|
2102
|
+
*
|
|
2103
|
+
* Scripts are extracted directly to configDir/scripts/ during stageTemplate —
|
|
2104
|
+
* there's no intermediate copy any more. This step is now a verification gate
|
|
2105
|
+
* (rather than a copy action) so the step count stays at 9 and we fail early
|
|
2106
|
+
* if the template tgz didn't carry a scripts/ dir.
|
|
2107
|
+
*/
|
|
2108
|
+
function verifyStartupScripts(configDir, log) {
|
|
2109
|
+
const targetScriptsDir = node_path.default.join(configDir, "scripts");
|
|
2110
|
+
if (!node_fs.default.existsSync(targetScriptsDir)) throw new Error(`scripts dir missing at ${targetScriptsDir} — template download failed?`);
|
|
2111
|
+
log(`scripts dir present at ${targetScriptsDir}`);
|
|
2112
|
+
}
|
|
2113
|
+
/**
|
|
2114
|
+
* Step 8: Install all extensions listed in the OSS manifest at `openclawTag`.
|
|
2115
|
+
* Replaces the old `plugins update --all` / pre-packed tar.gz flow — the
|
|
2116
|
+
* manifest is now the single source of truth for which extensions ship.
|
|
2117
|
+
*/
|
|
2118
|
+
async function step8InstallExtensions(openclawTag, ossFileMap, log) {
|
|
2119
|
+
log(`install-extension --all tag=${openclawTag}`);
|
|
2120
|
+
await installExtension(openclawTag, ossFileMap, {
|
|
2121
|
+
all: true,
|
|
2122
|
+
skipConfigUpdate: true
|
|
2123
|
+
});
|
|
2124
|
+
log("extensions installed");
|
|
2125
|
+
}
|
|
2126
|
+
/** Step 9: Write secrets/provider key files and restart openclaw. */
|
|
2127
|
+
function writeSecretsAndRestart(vars, resetData, configDir, log) {
|
|
2128
|
+
if (resetData.secretsContent && vars.secretsFilePath) {
|
|
2129
|
+
writeFile(vars.secretsFilePath, resetData.secretsContent);
|
|
2130
|
+
log(`wrote secrets to ${vars.secretsFilePath}`);
|
|
2131
|
+
}
|
|
2132
|
+
if (resetData.providerKeyContent && vars.providerFilePath) {
|
|
2133
|
+
writeFile(vars.providerFilePath, resetData.providerKeyContent);
|
|
2134
|
+
log(`wrote provider key to ${vars.providerFilePath}`);
|
|
2135
|
+
}
|
|
2136
|
+
const restartScript = node_path.default.join(configDir, "scripts", "restart.sh");
|
|
2137
|
+
if (fileExists(restartScript)) {
|
|
2138
|
+
const t = Date.now();
|
|
2139
|
+
shell(`bash '${restartScript}'`, 3e4);
|
|
2140
|
+
log(`restart.sh done in ${Date.now() - t}ms`);
|
|
2141
|
+
} else log(`no restart.sh at ${restartScript}, skip`);
|
|
2142
|
+
}
|
|
2143
|
+
/**
|
|
1143
2144
|
* Run the 9-step reset process. Called from the worker entry point.
|
|
2145
|
+
*
|
|
2146
|
+
* Each step is an independent function. The orchestrator handles progress
|
|
2147
|
+
* reporting, error handling, and process-level exception guards.
|
|
2148
|
+
*
|
|
2149
|
+
* Template assets (openclaw.json + scripts/) are downloaded from OSS into a
|
|
2150
|
+
* scratch dir via `stageTemplate` before step 1 — there is no bundled
|
|
2151
|
+
* `template/` directory at runtime any more.
|
|
1144
2152
|
*/
|
|
1145
|
-
function runReset(input, taskId, resultFile) {
|
|
2153
|
+
async function runReset(input, taskId, resultFile) {
|
|
1146
2154
|
const startedAt = (/* @__PURE__ */ new Date()).toISOString();
|
|
1147
|
-
const { configPath, resetData } = input;
|
|
2155
|
+
const { configPath, vars, resetData } = input;
|
|
1148
2156
|
const configDir = node_path.default.dirname(configPath);
|
|
2157
|
+
const stagedDir = node_path.default.join(DIAGNOSE_DIR, `reset-${taskId}-template`);
|
|
1149
2158
|
let currentStep = 0;
|
|
2159
|
+
let stepStartedAt = Date.now();
|
|
2160
|
+
const log = makeLogger(resetLogFile(taskId));
|
|
2161
|
+
log(`=== reset started, taskId=${taskId}, pid=${process.pid} ===`);
|
|
2162
|
+
log(`configPath=${configPath}, configDir=${configDir}, stagedDir=${stagedDir}`);
|
|
2163
|
+
const ossFileMap = resetData.ossFileMap;
|
|
2164
|
+
if (!ossFileMap || Object.keys(ossFileMap).length === 0) {
|
|
2165
|
+
const err = "resetData.ossFileMap missing or empty";
|
|
2166
|
+
log(`ERROR: ${err}`);
|
|
2167
|
+
markFailed(resultFile, 0, err, startedAt);
|
|
2168
|
+
process.exit(1);
|
|
2169
|
+
}
|
|
2170
|
+
let openclawTag;
|
|
2171
|
+
if (resetData.openclawTag) openclawTag = resetData.openclawTag;
|
|
2172
|
+
else try {
|
|
2173
|
+
openclawTag = getOpenclawTagFromOssFileMap(ossFileMap);
|
|
2174
|
+
} catch (e) {
|
|
2175
|
+
const err = e.message;
|
|
2176
|
+
log(`ERROR: ${err}`);
|
|
2177
|
+
markFailed(resultFile, 0, err, startedAt);
|
|
2178
|
+
process.exit(1);
|
|
2179
|
+
}
|
|
2180
|
+
log(`openclawTag=${openclawTag}`);
|
|
1150
2181
|
process.on("uncaughtException", (err) => {
|
|
1151
|
-
|
|
1152
|
-
markFailed(resultFile, currentStep,
|
|
2182
|
+
log(`FATAL uncaughtException: ${err.message}\n${err.stack ?? ""}`);
|
|
2183
|
+
markFailed(resultFile, currentStep, `uncaught exception: ${err.message}`, startedAt);
|
|
1153
2184
|
process.exit(1);
|
|
1154
2185
|
});
|
|
1155
2186
|
process.on("unhandledRejection", (reason) => {
|
|
1156
|
-
|
|
1157
|
-
markFailed(resultFile, currentStep,
|
|
2187
|
+
log(`FATAL unhandledRejection: ${String(reason)}`);
|
|
2188
|
+
markFailed(resultFile, currentStep, `unhandled rejection: ${reason}`, startedAt);
|
|
1158
2189
|
process.exit(1);
|
|
1159
2190
|
});
|
|
2191
|
+
/** Advance to the next step, updating the progress file and logging a boundary. */
|
|
2192
|
+
const step = (n) => {
|
|
2193
|
+
if (currentStep > 0) log(`step ${currentStep} "${STEPS[currentStep - 1]}" done in ${Date.now() - stepStartedAt}ms`);
|
|
2194
|
+
currentStep = n;
|
|
2195
|
+
stepStartedAt = Date.now();
|
|
2196
|
+
log(`--- step ${n}/${TOTAL_STEPS}: ${STEPS[n - 1]} ---`);
|
|
2197
|
+
updateProgress(resultFile, n, startedAt);
|
|
2198
|
+
};
|
|
1160
2199
|
try {
|
|
1161
|
-
|
|
1162
|
-
|
|
1163
|
-
|
|
1164
|
-
|
|
1165
|
-
|
|
1166
|
-
|
|
1167
|
-
|
|
1168
|
-
|
|
1169
|
-
|
|
1170
|
-
|
|
1171
|
-
|
|
1172
|
-
|
|
1173
|
-
|
|
1174
|
-
|
|
1175
|
-
|
|
1176
|
-
|
|
1177
|
-
|
|
1178
|
-
|
|
1179
|
-
|
|
1180
|
-
|
|
1181
|
-
|
|
1182
|
-
|
|
1183
|
-
|
|
1184
|
-
|
|
1185
|
-
|
|
1186
|
-
|
|
1187
|
-
|
|
1188
|
-
|
|
1189
|
-
updateProgress(resultFile, currentStep, STEPS[2], startedAt);
|
|
1190
|
-
const srcConfigPath = node_path.default.join(srcDir, "openclaw.json");
|
|
1191
|
-
if (!fileExists(srcConfigPath)) throw new Error("template openclaw.json not found in downloaded zip at " + srcConfigPath);
|
|
1192
|
-
let configContent = node_fs.default.readFileSync(srcConfigPath, "utf-8");
|
|
1193
|
-
for (const [placeholder, value] of Object.entries(resetData.templateVars)) configContent = configContent.split(placeholder).join(value);
|
|
1194
|
-
node_fs.default.writeFileSync(configPath, configContent, "utf-8");
|
|
1195
|
-
currentStep = 4;
|
|
1196
|
-
updateProgress(resultFile, currentStep, STEPS[3], startedAt);
|
|
1197
|
-
try {
|
|
1198
|
-
shell("pkill -f openclaw-gateway || true", 5e3);
|
|
1199
|
-
} catch {}
|
|
1200
|
-
shell("sleep 2", 5e3);
|
|
1201
|
-
currentStep = 5;
|
|
1202
|
-
updateProgress(resultFile, currentStep, STEPS[4], startedAt);
|
|
1203
|
-
const JSON5 = loadJSON5();
|
|
1204
|
-
const version = JSON5.parse(node_fs.default.readFileSync(srcConfigPath, "utf-8")).meta?.lastTouchedVersion;
|
|
1205
|
-
if (version) shell(`npm install -g @anthropic-ai/openclaw@${version}`, 3e5);
|
|
1206
|
-
else shell("npm install -g @anthropic-ai/openclaw@latest", 3e5);
|
|
1207
|
-
currentStep = 6;
|
|
1208
|
-
updateProgress(resultFile, currentStep, STEPS[5], startedAt);
|
|
1209
|
-
if (fileExists(CORE_BACKUP_PATH)) {
|
|
1210
|
-
const backup = JSON.parse(node_fs.default.readFileSync(CORE_BACKUP_PATH, "utf-8"));
|
|
1211
|
-
const mergeConfig = JSON5.parse(node_fs.default.readFileSync(configPath, "utf-8"));
|
|
1212
|
-
if (backup.agents) mergeConfig.agents = backup.agents;
|
|
1213
|
-
if (backup.bindings) mergeConfig.bindings = backup.bindings;
|
|
1214
|
-
const backupFeishu = backup.channels?.feishu;
|
|
1215
|
-
if (backupFeishu?.accounts) {
|
|
1216
|
-
if (!mergeConfig.channels) mergeConfig.channels = {};
|
|
1217
|
-
const ch = mergeConfig.channels;
|
|
1218
|
-
if (!ch.feishu) ch.feishu = {};
|
|
1219
|
-
const feishu = ch.feishu;
|
|
1220
|
-
feishu.accounts = backupFeishu.accounts;
|
|
1221
|
-
}
|
|
1222
|
-
node_fs.default.writeFileSync(configPath, JSON.stringify(mergeConfig, null, 2), "utf-8");
|
|
1223
|
-
}
|
|
1224
|
-
currentStep = 7;
|
|
1225
|
-
updateProgress(resultFile, currentStep, STEPS[6], startedAt);
|
|
1226
|
-
const srcScriptsDir = node_path.default.join(srcDir, "scripts");
|
|
1227
|
-
const targetScriptsDir = node_path.default.join(configDir, "scripts");
|
|
1228
|
-
if (node_fs.default.existsSync(srcScriptsDir)) {
|
|
1229
|
-
if (!node_fs.default.existsSync(targetScriptsDir)) node_fs.default.mkdirSync(targetScriptsDir, { recursive: true });
|
|
1230
|
-
shell(`cp -r '${srcScriptsDir}'/* '${targetScriptsDir}/'`, 1e4);
|
|
1231
|
-
}
|
|
1232
|
-
currentStep = 8;
|
|
1233
|
-
updateProgress(resultFile, currentStep, STEPS[7], startedAt);
|
|
1234
|
-
try {
|
|
1235
|
-
shell("openclaw plugins install --all", 3e5);
|
|
1236
|
-
} catch (e) {}
|
|
1237
|
-
currentStep = 9;
|
|
1238
|
-
updateProgress(resultFile, currentStep, STEPS[8], startedAt);
|
|
1239
|
-
if (resetData.secretsContent && input.vars.secretsFilePath) writeFile(input.vars.secretsFilePath, resetData.secretsContent);
|
|
1240
|
-
if (resetData.providerKeyContent && input.vars.providerFilePath) writeFile(input.vars.providerFilePath, resetData.providerKeyContent);
|
|
1241
|
-
const restartScript = node_path.default.join(configDir, "scripts", "restart.sh");
|
|
1242
|
-
if (fileExists(restartScript)) shell(`bash '${restartScript}'`, 3e4);
|
|
2200
|
+
await stageTemplate(openclawTag, ossFileMap, stagedDir, configDir, log);
|
|
2201
|
+
step(1);
|
|
2202
|
+
backupCurrentConfig(configPath, log);
|
|
2203
|
+
step(2);
|
|
2204
|
+
generateDefaultConfig(stagedDir, configPath, resetData.templateVars, log);
|
|
2205
|
+
step(3);
|
|
2206
|
+
killOpenclawProcesses(log);
|
|
2207
|
+
step(4);
|
|
2208
|
+
waitForInitNpm(10 * 6e4, log);
|
|
2209
|
+
step(5);
|
|
2210
|
+
await step5InstallOpenclaw(openclawTag, ossFileMap, log);
|
|
2211
|
+
step(6);
|
|
2212
|
+
mergeCoreBackupAndOrigins(configPath, vars, resetData, log);
|
|
2213
|
+
step(7);
|
|
2214
|
+
verifyStartupScripts(configDir, log);
|
|
2215
|
+
step(8);
|
|
2216
|
+
await step8InstallExtensions(openclawTag, ossFileMap, log);
|
|
2217
|
+
step(9);
|
|
2218
|
+
writeSecretsAndRestart(vars, resetData, configDir, log);
|
|
2219
|
+
log(`step 9 "${STEPS[8]}" done in ${Date.now() - stepStartedAt}ms`);
|
|
2220
|
+
log("=== reset completed successfully ===");
|
|
2221
|
+
markDone(resultFile, startedAt);
|
|
2222
|
+
} catch (e) {
|
|
2223
|
+
const err = e.message;
|
|
2224
|
+
log(`ERROR in step ${currentStep} "${STEPS[currentStep - 1] ?? "init"}" after ${Date.now() - stepStartedAt}ms: ${err}\n${e.stack ?? ""}`);
|
|
2225
|
+
markFailed(resultFile, currentStep, err, startedAt);
|
|
2226
|
+
process.exit(1);
|
|
2227
|
+
} finally {
|
|
1243
2228
|
try {
|
|
1244
|
-
node_fs.default.rmSync(
|
|
2229
|
+
node_fs.default.rmSync(stagedDir, {
|
|
1245
2230
|
recursive: true,
|
|
1246
2231
|
force: true
|
|
1247
2232
|
});
|
|
1248
2233
|
} catch {}
|
|
1249
|
-
markDone(resultFile, startedAt);
|
|
1250
|
-
} catch (e) {
|
|
1251
|
-
const stepName = currentStep > 0 ? STEPS[currentStep - 1] : "初始化";
|
|
1252
|
-
markFailed(resultFile, currentStep, stepName, e.message, startedAt);
|
|
1253
|
-
process.exit(1);
|
|
1254
|
-
}
|
|
1255
|
-
}
|
|
1256
|
-
/**
|
|
1257
|
-
* Find the source directory within the extracted zip.
|
|
1258
|
-
* Looks for openclaw.json in TMP_RESET_DIR or one level deep.
|
|
1259
|
-
*/
|
|
1260
|
-
function findSrcDir(baseDir) {
|
|
1261
|
-
if (node_fs.default.existsSync(node_path.default.join(baseDir, "openclaw.json"))) return baseDir;
|
|
1262
|
-
const entries = node_fs.default.readdirSync(baseDir, { withFileTypes: true });
|
|
1263
|
-
for (const entry of entries) if (entry.isDirectory()) {
|
|
1264
|
-
const candidate = node_path.default.join(baseDir, entry.name);
|
|
1265
|
-
if (node_fs.default.existsSync(node_path.default.join(candidate, "openclaw.json"))) return candidate;
|
|
1266
2234
|
}
|
|
1267
|
-
return baseDir;
|
|
1268
2235
|
}
|
|
1269
2236
|
//#endregion
|
|
1270
2237
|
//#region src/get-reset-task.ts
|
|
@@ -1274,7 +2241,7 @@ function findSrcDir(baseDir) {
|
|
|
1274
2241
|
* Returns immediately on terminal states (done/failed).
|
|
1275
2242
|
*/
|
|
1276
2243
|
function getResetTask(taskId) {
|
|
1277
|
-
const resultFile =
|
|
2244
|
+
const resultFile = resetResultFile(taskId);
|
|
1278
2245
|
const deadline = Date.now() + 3e4;
|
|
1279
2246
|
while (Date.now() < deadline) {
|
|
1280
2247
|
if (!node_fs.default.existsSync(resultFile)) {
|
|
@@ -1305,65 +2272,746 @@ function sleepSync(ms) {
|
|
|
1305
2272
|
Atomics.wait(arr, 0, 0, ms);
|
|
1306
2273
|
}
|
|
1307
2274
|
//#endregion
|
|
2275
|
+
//#region src/oss/resolveOssFileMap.ts
|
|
2276
|
+
/**
|
|
2277
|
+
* Pick an OssFileMap in the order of decreasing specificity:
|
|
2278
|
+
* 1. `--oss_file_map=` flag — operator override (manual invocations, tests)
|
|
2279
|
+
* 2. `ctx.install.ossFileMap` — new shape (innerapi-driven DoctorCtx)
|
|
2280
|
+
* 3. `ctx.resetData.ossFileMap` — legacy shape (sandbox_console push path)
|
|
2281
|
+
*
|
|
2282
|
+
* Throws when none of the three yields a non-empty map. Empty maps are
|
|
2283
|
+
* treated as missing — an empty map is useless downstream and almost always
|
|
2284
|
+
* indicates a ctx wiring bug.
|
|
2285
|
+
*/
|
|
2286
|
+
function resolveOssFileMap(args) {
|
|
2287
|
+
if (args.ossFileMapFlag) return JSON.parse(Buffer.from(args.ossFileMapFlag, "base64").toString("utf-8"));
|
|
2288
|
+
if (args.installOssFileMap && Object.keys(args.installOssFileMap).length > 0) return args.installOssFileMap;
|
|
2289
|
+
if (args.resetDataOssFileMap && Object.keys(args.resetDataOssFileMap).length > 0) return args.resetDataOssFileMap;
|
|
2290
|
+
throw new Error("ossFileMap missing: provide --oss_file_map flag, ctx.install.ossFileMap, or resetData.ossFileMap");
|
|
2291
|
+
}
|
|
2292
|
+
//#endregion
|
|
2293
|
+
//#region src/innerapi/fetchCtx.ts
|
|
2294
|
+
/**
|
|
2295
|
+
* CLI-side client for studio_server's `openclaw.get_doctor_ctx` inner API.
|
|
2296
|
+
*
|
|
2297
|
+
* Mirrors the proven pattern in
|
|
2298
|
+
* `packages/openclaw/extensions/miaoda/src/shared/innerapi-client.ts`:
|
|
2299
|
+
*
|
|
2300
|
+
* - `baseURL` from env `FORCE_AUTHN_INNERAPI_DOMAIN` (injected into every
|
|
2301
|
+
* openclaw sandbox).
|
|
2302
|
+
* - `platform: { enabled, tokenProvider: { type: 'file' } }` — the platform
|
|
2303
|
+
* plugin auto-attaches the sandbox's identity JWT loaded from the
|
|
2304
|
+
* rootfs token file. Same auth that the miaoda extension already uses.
|
|
2305
|
+
* - POST `/api/v1/studio/innerapi/integration_apis/call`
|
|
2306
|
+
* body = { apiName: 'openclaw.get_doctor_ctx', input: {}, bizType: 'openclaw' }
|
|
2307
|
+
* — the server-side APICall dispatches by `apiName` to
|
|
2308
|
+
* `GetDoctorCtxAPICall.Execute` whose `Name()` returns that string.
|
|
2309
|
+
* - Response envelope: { status_code, error_msg?, data: { success, output, ... } }.
|
|
2310
|
+
* `status_code` is a *string* ('0' = success).
|
|
2311
|
+
* Actual DoctorCtx lives in `data.output`.
|
|
2312
|
+
* - `x-tt-logid` header is logged on every failure path for cross-service
|
|
2313
|
+
* traceability.
|
|
2314
|
+
*
|
|
2315
|
+
* On HTTP 401 (sandbox identity token expired/invalid) we `process.exit(77)`
|
|
2316
|
+
* instead of throwing — the outer catch in `index.ts` cannot then mask auth
|
|
2317
|
+
* failure as a generic "Error: ...". Caller (e.g. sandbox_console) sees the
|
|
2318
|
+
* exit code and can refresh the token + retry.
|
|
2319
|
+
*/
|
|
2320
|
+
const INNERAPI_CALL_PATH = "/api/v1/studio/innerapi/integration_apis/call";
|
|
2321
|
+
const API_NAME = "openclaw.get_doctor_ctx";
|
|
2322
|
+
const BIZ_TYPE = "openclaw";
|
|
2323
|
+
const API_TIMEOUT_MS = 3e4;
|
|
2324
|
+
const MAX_LOG_BODY = 500;
|
|
2325
|
+
let clientInstance = null;
|
|
2326
|
+
function getHttpClient() {
|
|
2327
|
+
if (!clientInstance) {
|
|
2328
|
+
const apiUrl = process.env.FORCE_AUTHN_INNERAPI_DOMAIN;
|
|
2329
|
+
(0, node_assert.default)(apiUrl, "missing env: FORCE_AUTHN_INNERAPI_DOMAIN (openclaw sandbox runtime must expose this)");
|
|
2330
|
+
clientInstance = new _lark_apaas_http_client.HttpClient({
|
|
2331
|
+
baseURL: apiUrl,
|
|
2332
|
+
timeout: API_TIMEOUT_MS,
|
|
2333
|
+
platform: {
|
|
2334
|
+
enabled: true,
|
|
2335
|
+
tokenProvider: { type: "file" }
|
|
2336
|
+
}
|
|
2337
|
+
});
|
|
2338
|
+
}
|
|
2339
|
+
return clientInstance;
|
|
2340
|
+
}
|
|
2341
|
+
/**
|
|
2342
|
+
* Fetch the sandbox's DoctorCtx by calling the innerapi's generic
|
|
2343
|
+
* `integration_apis/call` dispatcher with apiName=openclaw.get_doctor_ctx.
|
|
2344
|
+
*
|
|
2345
|
+
* Throws on HTTP (non-401) / decode / business errors. On 401 calls
|
|
2346
|
+
* `process.exit(77)` directly.
|
|
2347
|
+
*/
|
|
2348
|
+
async function fetchCtxViaInnerApi() {
|
|
2349
|
+
const client = getHttpClient();
|
|
2350
|
+
const body = {
|
|
2351
|
+
apiName: API_NAME,
|
|
2352
|
+
input: {},
|
|
2353
|
+
bizType: BIZ_TYPE
|
|
2354
|
+
};
|
|
2355
|
+
const start = Date.now();
|
|
2356
|
+
const headers = { "Content-Type": "application/json" };
|
|
2357
|
+
const ttEnv = process.env.X_TT_ENV;
|
|
2358
|
+
if (ttEnv) headers["x-tt-env"] = ttEnv;
|
|
2359
|
+
let response;
|
|
2360
|
+
try {
|
|
2361
|
+
response = await client.post(INNERAPI_CALL_PATH, body, { headers });
|
|
2362
|
+
} catch (e) {
|
|
2363
|
+
const durationMs = Date.now() - start;
|
|
2364
|
+
if (e instanceof _lark_apaas_http_client.HttpError && e.response) {
|
|
2365
|
+
const status = e.response.status;
|
|
2366
|
+
const logId = e.response.headers.get("x-tt-logid") ?? "";
|
|
2367
|
+
if (status === 401) {
|
|
2368
|
+
console.error(`[CLI] innerapi 401 (logID: ${logId}) — sandbox identity token expired/invalid; exiting 77`);
|
|
2369
|
+
process.exit(77);
|
|
2370
|
+
}
|
|
2371
|
+
throw new Error(`fetchCtxViaInnerApi HTTP ${status} ${e.response.statusText} (logID: ${logId}, durationMs: ${durationMs})`);
|
|
2372
|
+
}
|
|
2373
|
+
const msg = e instanceof Error ? e.message : String(e);
|
|
2374
|
+
throw new Error(`fetchCtxViaInnerApi network error: ${msg} (durationMs: ${durationMs})`);
|
|
2375
|
+
}
|
|
2376
|
+
const logId = response.headers.get("x-tt-logid") ?? "";
|
|
2377
|
+
const durationMs = Date.now() - start;
|
|
2378
|
+
if (!response.ok) {
|
|
2379
|
+
if (response.status === 401) {
|
|
2380
|
+
console.error(`[CLI] innerapi 401 (logID: ${logId}) — sandbox identity token expired/invalid; exiting 77`);
|
|
2381
|
+
process.exit(77);
|
|
2382
|
+
}
|
|
2383
|
+
let preview = "";
|
|
2384
|
+
try {
|
|
2385
|
+
preview = (await response.text()).slice(0, MAX_LOG_BODY);
|
|
2386
|
+
} catch {}
|
|
2387
|
+
throw new Error(`fetchCtxViaInnerApi HTTP ${response.status} ${response.statusText} (logID: ${logId}, durationMs: ${durationMs})${preview ? ` body=${preview}` : ""}`);
|
|
2388
|
+
}
|
|
2389
|
+
let envelope;
|
|
2390
|
+
try {
|
|
2391
|
+
envelope = await response.json();
|
|
2392
|
+
} catch {
|
|
2393
|
+
throw new Error(`fetchCtxViaInnerApi decode error (logID: ${logId}, durationMs: ${durationMs})`);
|
|
2394
|
+
}
|
|
2395
|
+
if (envelope.status_code !== "0") throw new Error(`fetchCtxViaInnerApi API error (logID: ${logId}, durationMs: ${durationMs}): code=${envelope.status_code}, message=${envelope.error_msg ?? ""}`);
|
|
2396
|
+
if (envelope.data && envelope.data.success === false) throw new Error(`fetchCtxViaInnerApi business error (logID: ${logId}, durationMs: ${durationMs}): ${envelope.error_msg ?? JSON.stringify(envelope.data)}`);
|
|
2397
|
+
const output = envelope.data?.output;
|
|
2398
|
+
if (!output || typeof output !== "object") throw new Error(`fetchCtxViaInnerApi empty/invalid output (logID: ${logId}, durationMs: ${durationMs})`);
|
|
2399
|
+
return output;
|
|
2400
|
+
}
|
|
2401
|
+
//#endregion
|
|
2402
|
+
//#region src/ctx/normalize.ts
|
|
2403
|
+
/**
|
|
2404
|
+
* Accept raw ctx from any of these sources and produce a uniform view:
|
|
2405
|
+
* - New shape (DoctorCtx): `{ app, install, secrets, reset }` — from innerapi.
|
|
2406
|
+
* - Old shape (ResetInput): `{ configPath, vars, resetData }` — from
|
|
2407
|
+
* sandbox_console push path.
|
|
2408
|
+
* Detection is structural: if the top-level has all four new-shape groups we
|
|
2409
|
+
* pass through; otherwise we remap from the old shape.
|
|
2410
|
+
*
|
|
2411
|
+
* Missing fields fall back to safe empty defaults (empty strings / arrays /
|
|
2412
|
+
* maps) so every downstream consumer can read e.g. `ctx.app.feishuAppID`
|
|
2413
|
+
* without an extra nullish guard.
|
|
2414
|
+
*/
|
|
2415
|
+
function normalizeCtx(raw) {
|
|
2416
|
+
const r = raw ?? {};
|
|
2417
|
+
if (r.app && typeof r.app === "object" && r.install && typeof r.install === "object" && r.secrets && typeof r.secrets === "object" && r.reset && typeof r.reset === "object") return {
|
|
2418
|
+
app: fillApp(r.app),
|
|
2419
|
+
install: {
|
|
2420
|
+
openclawTag: r.install.openclawTag,
|
|
2421
|
+
ossFileMap: r.install.ossFileMap ?? {}
|
|
2422
|
+
},
|
|
2423
|
+
secrets: {
|
|
2424
|
+
secretsContent: r.secrets.secretsContent ?? "",
|
|
2425
|
+
providerKeyContent: r.secrets.providerKeyContent ?? ""
|
|
2426
|
+
},
|
|
2427
|
+
reset: {
|
|
2428
|
+
templateVars: r.reset.templateVars ?? {},
|
|
2429
|
+
coreBackup: r.reset.coreBackup
|
|
2430
|
+
}
|
|
2431
|
+
};
|
|
2432
|
+
const vars = r.vars ?? {};
|
|
2433
|
+
const resetData = r.resetData ?? {};
|
|
2434
|
+
const repairData = r.repairData ?? {};
|
|
2435
|
+
return {
|
|
2436
|
+
app: fillApp(vars),
|
|
2437
|
+
install: {
|
|
2438
|
+
openclawTag: r.install?.openclawTag ?? r.openclawTag,
|
|
2439
|
+
ossFileMap: r.install?.ossFileMap ?? resetData.ossFileMap ?? r.ossFileMap ?? {}
|
|
2440
|
+
},
|
|
2441
|
+
secrets: {
|
|
2442
|
+
secretsContent: resetData.secretsContent ?? repairData.secretsContent ?? "",
|
|
2443
|
+
providerKeyContent: resetData.providerKeyContent ?? repairData.providerKeyContent ?? ""
|
|
2444
|
+
},
|
|
2445
|
+
reset: {
|
|
2446
|
+
templateVars: resetData.templateVars ?? {},
|
|
2447
|
+
coreBackup: resetData.coreBackup
|
|
2448
|
+
}
|
|
2449
|
+
};
|
|
2450
|
+
}
|
|
2451
|
+
function fillApp(src) {
|
|
2452
|
+
return {
|
|
2453
|
+
feishuAppID: src.feishuAppID ?? "",
|
|
2454
|
+
feishuAppSecret: src.feishuAppSecret ?? "",
|
|
2455
|
+
feishuOpenID: src.feishuOpenID ?? "",
|
|
2456
|
+
openClawName: src.openClawName ?? "",
|
|
2457
|
+
gatewayToken: src.gatewayToken ?? "",
|
|
2458
|
+
innerAPIKey: src.innerAPIKey ?? "",
|
|
2459
|
+
baseURL: src.baseURL ?? "",
|
|
2460
|
+
miaodaDomain: src.miaodaDomain ?? "",
|
|
2461
|
+
miaodaOrigin: src.miaodaOrigin ?? "",
|
|
2462
|
+
expectedOrigins: Array.isArray(src.expectedOrigins) ? src.expectedOrigins : []
|
|
2463
|
+
};
|
|
2464
|
+
}
|
|
2465
|
+
//#endregion
|
|
2466
|
+
//#region src/ctx-input.ts
|
|
2467
|
+
/**
|
|
2468
|
+
* Build legacy Check/Repair/Reset input shapes from a raw ctx object. Shared
|
|
2469
|
+
* by both the top-level CLI dispatcher (`index.ts`) and the new `doctor`
|
|
2470
|
+
* subcommand (`doctor.ts`), which need identical input synthesis.
|
|
2471
|
+
*
|
|
2472
|
+
* Behavior:
|
|
2473
|
+
* - If `raw` already carries the legacy `configPath + vars` shape (the one
|
|
2474
|
+
* sandbox_console push emits), it's trusted and returned as-is. This
|
|
2475
|
+
* keeps the existing sandbox_console push contract working.
|
|
2476
|
+
* - Otherwise `raw` is treated as the new-shape DoctorCtx (or anything
|
|
2477
|
+
* structurally close — `normalizeCtx` fills the gaps with safe empties)
|
|
2478
|
+
* and the legacy Vars shape is synthesised using the hardcoded sandbox
|
|
2479
|
+
* path invariants from `paths.ts`.
|
|
2480
|
+
*
|
|
2481
|
+
* The optional `configPathOverride` lets unit tests point the builder at a
|
|
2482
|
+
* tmp file; production callers should leave it undefined so the sandbox
|
|
2483
|
+
* invariant from `paths.ts` is used.
|
|
2484
|
+
*/
|
|
2485
|
+
function buildCheckInput(raw, configPathOverride) {
|
|
2486
|
+
const r = raw ?? {};
|
|
2487
|
+
if (r.configPath && r.vars) {
|
|
2488
|
+
if (configPathOverride) return {
|
|
2489
|
+
...r,
|
|
2490
|
+
configPath: configPathOverride
|
|
2491
|
+
};
|
|
2492
|
+
return r;
|
|
2493
|
+
}
|
|
2494
|
+
const ctx = normalizeCtx(raw);
|
|
2495
|
+
return {
|
|
2496
|
+
configPath: configPathOverride ?? CONFIG_PATH,
|
|
2497
|
+
vars: {
|
|
2498
|
+
feishuAppID: ctx.app.feishuAppID,
|
|
2499
|
+
feishuAppSecret: ctx.app.feishuAppSecret,
|
|
2500
|
+
innerAPIKey: ctx.app.innerAPIKey,
|
|
2501
|
+
gatewayToken: ctx.app.gatewayToken,
|
|
2502
|
+
baseURL: ctx.app.baseURL,
|
|
2503
|
+
expectedOrigins: ctx.app.expectedOrigins,
|
|
2504
|
+
providerFilePath: PROVIDER_FILE_PATH,
|
|
2505
|
+
secretsFilePath: SECRETS_FILE_PATH
|
|
2506
|
+
},
|
|
2507
|
+
templateVars: ctx.reset.templateVars
|
|
2508
|
+
};
|
|
2509
|
+
}
|
|
2510
|
+
function buildRepairInput(raw, configPathOverride) {
|
|
2511
|
+
const r = raw ?? {};
|
|
2512
|
+
if (r.configPath && r.vars) {
|
|
2513
|
+
if (configPathOverride) return {
|
|
2514
|
+
...r,
|
|
2515
|
+
configPath: configPathOverride
|
|
2516
|
+
};
|
|
2517
|
+
return r;
|
|
2518
|
+
}
|
|
2519
|
+
const ctx = normalizeCtx(raw);
|
|
2520
|
+
return {
|
|
2521
|
+
configPath: configPathOverride ?? CONFIG_PATH,
|
|
2522
|
+
vars: {
|
|
2523
|
+
feishuAppID: ctx.app.feishuAppID,
|
|
2524
|
+
feishuAppSecret: ctx.app.feishuAppSecret,
|
|
2525
|
+
innerAPIKey: ctx.app.innerAPIKey,
|
|
2526
|
+
gatewayToken: ctx.app.gatewayToken,
|
|
2527
|
+
baseURL: ctx.app.baseURL,
|
|
2528
|
+
expectedOrigins: ctx.app.expectedOrigins,
|
|
2529
|
+
providerFilePath: PROVIDER_FILE_PATH,
|
|
2530
|
+
secretsFilePath: SECRETS_FILE_PATH
|
|
2531
|
+
},
|
|
2532
|
+
repairData: {
|
|
2533
|
+
secretsContent: ctx.secrets.secretsContent,
|
|
2534
|
+
providerKeyContent: ctx.secrets.providerKeyContent
|
|
2535
|
+
},
|
|
2536
|
+
templateVars: ctx.reset.templateVars
|
|
2537
|
+
};
|
|
2538
|
+
}
|
|
2539
|
+
function buildResetInput(raw, configPathOverride) {
|
|
2540
|
+
const r = raw ?? {};
|
|
2541
|
+
if (r.configPath && r.vars && r.resetData) {
|
|
2542
|
+
if (configPathOverride) return {
|
|
2543
|
+
...r,
|
|
2544
|
+
configPath: configPathOverride
|
|
2545
|
+
};
|
|
2546
|
+
return r;
|
|
2547
|
+
}
|
|
2548
|
+
const ctx = normalizeCtx(raw);
|
|
2549
|
+
return {
|
|
2550
|
+
configPath: configPathOverride ?? CONFIG_PATH,
|
|
2551
|
+
vars: {
|
|
2552
|
+
feishuAppID: ctx.app.feishuAppID,
|
|
2553
|
+
feishuAppSecret: ctx.app.feishuAppSecret,
|
|
2554
|
+
innerAPIKey: ctx.app.innerAPIKey,
|
|
2555
|
+
gatewayToken: ctx.app.gatewayToken,
|
|
2556
|
+
baseURL: ctx.app.baseURL,
|
|
2557
|
+
expectedOrigins: ctx.app.expectedOrigins,
|
|
2558
|
+
providerFilePath: PROVIDER_FILE_PATH,
|
|
2559
|
+
secretsFilePath: SECRETS_FILE_PATH
|
|
2560
|
+
},
|
|
2561
|
+
resetData: {
|
|
2562
|
+
templateVars: ctx.reset.templateVars,
|
|
2563
|
+
secretsContent: ctx.secrets.secretsContent,
|
|
2564
|
+
providerKeyContent: ctx.secrets.providerKeyContent,
|
|
2565
|
+
coreBackup: ctx.reset.coreBackup,
|
|
2566
|
+
ossFileMap: ctx.install.ossFileMap,
|
|
2567
|
+
openclawTag: ctx.install.openclawTag
|
|
2568
|
+
}
|
|
2569
|
+
};
|
|
2570
|
+
}
|
|
2571
|
+
//#endregion
|
|
2572
|
+
//#region src/doctor.ts
|
|
2573
|
+
async function runDoctor(rawCtx, opts) {
|
|
2574
|
+
if (opts.fix && opts.rules.length > 0) {
|
|
2575
|
+
const repairInput = buildRepairInput(rawCtx, opts.configPath);
|
|
2576
|
+
repairInput.failedRules = opts.rules;
|
|
2577
|
+
repairInput.repairData = {
|
|
2578
|
+
...repairInput.repairData ?? {},
|
|
2579
|
+
restartCommand: ""
|
|
2580
|
+
};
|
|
2581
|
+
return { repair: runRepair(repairInput) };
|
|
2582
|
+
}
|
|
2583
|
+
const check = runCheck(buildCheckInput(rawCtx, opts.configPath));
|
|
2584
|
+
if (!opts.fix) return { failedRules: check.failedRules };
|
|
2585
|
+
const repairInput = buildRepairInput(rawCtx, opts.configPath);
|
|
2586
|
+
repairInput.failedRules = check.failedRules.standard;
|
|
2587
|
+
return {
|
|
2588
|
+
check,
|
|
2589
|
+
repair: runRepair(repairInput)
|
|
2590
|
+
};
|
|
2591
|
+
}
|
|
2592
|
+
//#endregion
|
|
2593
|
+
//#region src/help.ts
|
|
2594
|
+
const BIN = "mclaw-diagnose";
|
|
2595
|
+
function versionBanner() {
|
|
2596
|
+
return `v0.1.1-alpha.30`;
|
|
2597
|
+
}
|
|
2598
|
+
/**
 * Registry of CLI subcommands. `name` is the literal argv token, `hidden`
 * controls whether the command is listed in the default top-level help
 * (hidden ones are revealed by -x/--expert), `summary` is the one-line
 * listing text, and `help` is the full body printed for `<command> --help`.
 * NOTE(review): reconstructed from a whitespace-mangled rendering — the exact
 * column alignment inside the help strings is approximate; verify against the
 * published package.
 */
const COMMANDS = [
	{
		name: "doctor",
		hidden: false,
		summary: "Diagnose openclaw config; apply repairs with --fix",
		help: `USAGE
  ${BIN} doctor [--fix] [--rule=<key>]...

DESCRIPTION
  Fetches DoctorCtx via innerapi, then runs one of three modes depending
  on the flags. Output is a single JSON object on stdout.

MODES
  (no flags)             Check-only. Runs the rule engine against the
                         sandbox's current openclaw config and returns
                         { failedRules: { standard, ai, reset } }
                         No files are mutated. Use this when you just
                         want to know what's wrong.

  --fix                  Check + repair-all. First runs the rule engine,
                         then repairs every failing standard-mode rule.
                         Returns
                         { check: {...}, repair: {...} }
                         Use this as the default "fix everything" action.

  --fix --rule=<key>...  Targeted repair. Skips the check pass entirely
                         and runs repair against the listed rule keys
                         only. Unknown keys are silently ignored.
                         Returns { repair: {...} } with only those
                         rules' outcomes. Use this when you already
                         know which rules need fixing.

OPTIONS
  --fix                  Enable repair. See MODES above.
  --rule=<key>           Repair only this rule key. Repeatable. Only
                         meaningful together with --fix.

EXAMPLES
  ${BIN} doctor                         # check only
  ${BIN} doctor --fix                   # check then repair all
  ${BIN} doctor --fix --rule=gateway    # repair 'gateway' only
  ${BIN} doctor --fix --rule=gateway --rule=jwt_token   # repair multiple

EXIT CODES
  0   success
  1   generic error
  77  innerapi authentication failed (sandbox JWT expired/invalid)
`
	},
	{
		name: "check",
		hidden: true,
		summary: "Run rule-engine check only",
		help: `USAGE
  ${BIN} check [--ctx=<base64>]

DESCRIPTION
  Runs the rule engine against the sandbox's current openclaw config and
  returns { failedRules }. Used by sandbox_console's push-style callers
  that already own the ctx — end-users should prefer \`doctor\`.

OPTIONS
  --ctx=<base64>   Opaque ctx JSON (base64). When absent, fetched from
                   innerapi (same path as doctor).
`
	},
	{
		name: "repair",
		hidden: true,
		summary: "Apply standard-mode repairs",
		help: `USAGE
  ${BIN} repair [--ctx=<base64>]

DESCRIPTION
  Runs repair for the failing rules listed inside the ctx's repairData.
  Intended for sandbox_console's push path — end-users should use
  \`doctor --fix\` instead.

OPTIONS
  --ctx=<base64>   Opaque ctx JSON (base64). When absent, fetched from
                   innerapi.
`
	},
	{
		name: "reset",
		hidden: true,
		summary: "Re-initialize sandbox via the 9-step reset pipeline",
		help: `USAGE
  ${BIN} reset --async [--ctx=<base64>]
  ${BIN} reset --worker --task-id=<id> [--ctx=<base64>]

DESCRIPTION
  Two-phase pipeline driven asynchronously: the --async invocation spawns
  a detached worker and returns { taskId } immediately; the --worker
  invocation (spawned by --async) runs the actual 9 steps and writes
  progress to /tmp/openclaw-diagnose/reset-<taskId>.json.

  Poll progress with \`${BIN} get_reset_task --task-id=<id>\`.

OPTIONS
  --async          Start a detached worker and return taskId on stdout.
  --worker         Internal — run the 9-step pipeline (launched by --async).
  --task-id=<id>   Required with --worker; identifies the progress file.
  --ctx=<base64>   Opaque ctx JSON; fetched from innerapi when absent.
`
	},
	{
		name: "get_reset_task",
		hidden: true,
		summary: "Poll progress of an async reset task",
		help: `USAGE
  ${BIN} get_reset_task --task-id=<id>

DESCRIPTION
  Reads /tmp/openclaw-diagnose/reset-<taskId>.json and prints its content
  as JSON on stdout. Safe to call repeatedly while reset is in progress.

OPTIONS
  --task-id=<id>   Required. Matches the id returned by \`reset --async\`.
`
	},
	{
		name: "install-openclaw",
		hidden: true,
		summary: "Download + install the openclaw tarball",
		help: `USAGE
  ${BIN} install-openclaw <tag> [--ctx=<base64> | --oss_file_map=<base64>]

DESCRIPTION
  Downloads the openclaw@<tag> tgz via the signed OSS URL found in the
  ctx's install.ossFileMap, extracts it into a tmpfs staging dir, and
  atomically swaps it into /home/gem/.npm-global/lib/node_modules/openclaw.
  Used by step 5 of reset.

ARGUMENTS
  <tag>               Openclaw version tag, e.g. 2026.4.11.

OPTIONS
  --ctx=<base64>      Opaque ctx; ossFileMap is extracted from it.
  --oss_file_map=...  Pre-built OSS URL map (base64 JSON); skips innerapi
                      entirely. Wins over --ctx when both provided.
`
	},
	{
		name: "install-extension",
		hidden: true,
		summary: "Install openclaw extension package(s)",
		help: `USAGE
  ${BIN} install-extension <tag> (--all | --extension=<name>...) [options]

DESCRIPTION
  Downloads + installs one or more openclaw extension tarballs
  (feishu, miaoda, etc.) into <home_base>/workspace/agent/extensions/,
  then splices installMetadata into openclaw.json's plugins.installs
  unless --skip-config-update is passed.

ARGUMENTS
  <tag>                 Openclaw version tag; extension versions resolved
                        against the matching manifest.

OPTIONS
  --all                 Install every extension in the manifest.
  --extension=<name>    Install a specific extension (repeatable).
  --home_base=<dir>     Override the /home/gem base (tests).
  --config_path=<p>     Override the openclaw.json path (tests).
  --skip-config-update  Leave plugins.installs in openclaw.json untouched.
  --ctx=<base64>        Opaque ctx; see install-openclaw for semantics.
  --oss_file_map=...    Pre-built OSS URL map (base64 JSON).
`
	},
	{
		name: "download-resource",
		hidden: true,
		summary: "Download + extract a single OSS resource",
		help: `USAGE
  ${BIN} download-resource <tag> --role=<role> --name=<name> [--dir=<dir>] [options]

DESCRIPTION
  Downloads one resource (template, config asset, etc.) identified by
  (role, name) from the manifest and extracts/copies it to <dir>.

ARGUMENTS
  <tag>               Openclaw version tag.

OPTIONS
  --role=<role>       Package role (e.g. template, config).
  --name=<name>       Package name within the role.
  --dir=<dir>         Target dir (defaults to dirname(pkg.installPath)).
  --ctx=<base64>      Opaque ctx; ossFileMap is extracted from it.
  --oss_file_map=...  Pre-built OSS URL map (base64 JSON).
`
	}
];
|
|
2791
|
+
/** Extract the help (-h/--help) and expert (-x/--expert) flags from argv. */
function parseHelpFlags(args) {
	const has = (flag) => args.includes(flag);
	const help = has("--help") || has("-h");
	const expert = has("-x") || has("--expert");
	return { help, expert };
}
|
|
2797
|
+
/**
 * Render the top-level help text. When `expert` is true, hidden (internal)
 * commands are listed alongside the user-facing ones.
 */
function formatTopLevelHelp(expert) {
	const pad = (s, w) => s + " ".repeat(Math.max(0, w - s.length));
	const width = Math.max(...COMMANDS.map((c) => c.name.length)) + 2;
	const visible = [];
	const hidden = [];
	for (const cmd of COMMANDS) (cmd.hidden ? hidden : visible).push(cmd);
	const out = [
		`${BIN} — OpenClaw config diagnose / repair CLI`,
		versionBanner(),
		"",
		"USAGE",
		`  ${BIN} <command> [options]`,
		`  ${BIN} <command> --help per-command help`,
		`  ${BIN} --help this message`,
		"",
		"COMMANDS"
	];
	for (const cmd of visible) out.push(`  ${pad(cmd.name, width)}${cmd.summary}`);
	if (expert && hidden.length > 0) {
		out.push("");
		out.push("INTERNAL COMMANDS (revealed by -x)");
		for (const cmd of hidden) out.push(`  ${pad(cmd.name, width)}${cmd.summary}`);
	}
	out.push("");
	return out.join("\n");
}
|
|
2825
|
+
/** Render per-command help. Returns undefined when the name is unknown. */
function formatCommandHelp(name) {
	return COMMANDS.find((cmd) => cmd.name === name)?.help;
}
|
|
2831
|
+
//#endregion
|
|
1308
2832
|
//#region src/index.ts
// Raw CLI arguments (argv minus node executable and script path).
const args = node_process.default.argv.slice(2);
// The subcommand is the first token that is not a flag; undefined when the
// CLI is invoked with no command (handled in main()).
const mode = args.find((a) => !a.startsWith("-"));
|
|
2835
|
+
/**
 * Decode `--ctx=<base64>` into an opaque JSON object. Returns undefined when
 * the flag isn't present — the caller decides whether to fall back to the
 * innerapi or to error out.
 *
 * The object's shape is not enforced here; downstream code consumes it via
 * either `normalizeCtx()` (new path) or direct field access for the legacy
 * check/repair/reset contract still used by sandbox_console push.
 */
function parseCtxFlag(args) {
	const PREFIX = "--ctx=";
	const flag = args.find((arg) => arg.startsWith(PREFIX));
	if (flag === undefined) return void 0;
	const payload = Buffer.from(flag.slice(PREFIX.length), "base64").toString("utf-8");
	return JSON.parse(payload);
}
|
|
2850
|
+
/**
 * Pull the first non-flag positional after the mode name.
 * (The mode itself is args[0] in the filtered set, so we skip index 0.)
 */
function getPositionalTag(args, modeName) {
	for (let i = 1; i < args.length; i += 1) {
		const candidate = args[i];
		if (!candidate.startsWith("--") && candidate !== modeName) return candidate;
	}
	return void 0;
}
|
|
2857
|
+
/** Return the value of the first `--<name>=<value>` flag, or undefined. */
function getFlag(args, name) {
	const prefix = `--${name}=`;
	for (const arg of args) {
		if (arg.startsWith(prefix)) return arg.slice(prefix.length);
	}
	return void 0;
}
|
|
2861
|
+
/** Collect every `--<name>=<value>` occurrence, in order; [] when none. */
function getMultiFlag(args, name) {
	const prefix = `--${name}=`;
	const values = [];
	for (const arg of args) {
		if (arg.startsWith(prefix)) values.push(arg.slice(prefix.length));
	}
	return values;
}
|
|
2865
|
+
/**
 * CLI entry point: dispatches on the first non-flag argv token (`mode`).
 * Every command prints a single JSON object on stdout; help text goes to
 * stdout (when requested) or stderr (on usage errors, with exit code 1).
 */
async function main() {
	installStderrMirror();
	const helpFlags = parseHelpFlags(args);
	// `<command> --help`: print per-command help when the command is known,
	// otherwise report the unknown command plus the top-level help.
	if (mode && helpFlags.help) {
		const body = formatCommandHelp(mode);
		if (body) {
			node_process.default.stdout.write(body);
			return;
		}
		node_process.default.stderr.write(`Unknown command: ${mode}\n\n`);
		node_process.default.stderr.write(formatTopLevelHelp(helpFlags.expert));
		node_process.default.exit(1);
	}
	// No command at all: `--help` is a success (help on stdout), anything
	// else is a usage error (help on stderr, exit 1).
	if (!mode) {
		if (helpFlags.help) {
			node_process.default.stdout.write(formatTopLevelHelp(helpFlags.expert));
			return;
		}
		node_process.default.stderr.write(formatTopLevelHelp(helpFlags.expert));
		node_process.default.exit(1);
	}
	switch (mode) {
		// Legacy push-path commands: ctx may be supplied inline via --ctx,
		// otherwise it is fetched from innerapi.
		case "check":
		case "repair": {
			const raw = parseCtxFlag(args) ?? await fetchCtxViaInnerApi();
			if (mode === "check") console.log(JSON.stringify(runCheck(buildCheckInput(raw))));
			else console.log(JSON.stringify(runRepair(buildRepairInput(raw))));
			break;
		}
		// User-facing diagnose/repair entry; ctx always comes from innerapi.
		case "doctor": {
			const fix = args.includes("--fix");
			const rules = getMultiFlag(args, "rule");
			const result = await runDoctor(await fetchCtxViaInnerApi(), {
				fix,
				rules
			});
			console.log(JSON.stringify(result));
			break;
		}
		// Two-phase reset: --async spawns a detached worker and returns a
		// task id; --worker (internal) runs the pipeline and records progress.
		case "reset":
			if (args.includes("--async")) {
				// The worker receives the ctx as base64, so keep the inline
				// flag verbatim when present instead of re-encoding it.
				const ctxArg = args.find((a) => a.startsWith("--ctx="));
				let ctxBase64;
				if (ctxArg) ctxBase64 = ctxArg.slice(6);
				else {
					const fetched = await fetchCtxViaInnerApi();
					ctxBase64 = Buffer.from(JSON.stringify(fetched), "utf-8").toString("base64");
				}
				console.log(JSON.stringify(startAsyncReset(ctxBase64)));
			} else if (args.includes("--worker")) {
				const taskId = args.find((a) => a.startsWith("--task-id="))?.slice(10);
				if (!taskId) {
					console.error("Error: --task-id=<id> is required for worker");
					node_process.default.exit(1);
				}
				const resultFile = resetResultFile(taskId);
				await runReset(buildResetInput(parseCtxFlag(args) ?? await fetchCtxViaInnerApi()), taskId, resultFile);
			} else {
				console.error("Usage: reset --async [--ctx=<base64>] | reset --worker --task-id=<id> [--ctx=<base64>]");
				node_process.default.exit(1);
			}
			break;
		// Read back the progress file written by a reset worker.
		case "get_reset_task": {
			const taskId = args.find((a) => a.startsWith("--task-id="))?.slice(10);
			if (!taskId) {
				console.error("Error: --task-id=<id> is required");
				node_process.default.exit(1);
			}
			console.log(JSON.stringify(getResetTask(taskId)));
			break;
		}
		case "install-openclaw": {
			const tag = getPositionalTag(args, "install-openclaw");
			if (!tag) {
				console.error("Usage: install-openclaw <tag> [--ctx=<base64> | --oss_file_map=<base64>]");
				node_process.default.exit(1);
			}
			// --oss_file_map wins; only hit ctx/innerapi when it is absent.
			const ossFileMapFlag = getFlag(args, "oss_file_map");
			let installOssFileMap;
			if (!ossFileMapFlag) installOssFileMap = normalizeCtx(parseCtxFlag(args) ?? await fetchCtxViaInnerApi()).install.ossFileMap;
			await installOpenclaw(tag, resolveOssFileMap({
				ossFileMapFlag,
				installOssFileMap
			}));
			console.log(JSON.stringify({ ok: true }));
			break;
		}
		case "install-extension": {
			const tag = getPositionalTag(args, "install-extension");
			if (!tag) {
				console.error("Usage: install-extension <tag> (--all | --extension=<name>...) [--home_base=<dir>] [--config_path=<path>] [--skip-config-update] [--ctx=<base64> | --oss_file_map=<base64>]");
				node_process.default.exit(1);
			}
			const all = args.includes("--all");
			const names = getMultiFlag(args, "extension");
			const homeBase = getFlag(args, "home_base");
			const configPath = getFlag(args, "config_path");
			const skipConfigUpdate = args.includes("--skip-config-update");
			const ossFileMapFlag = getFlag(args, "oss_file_map");
			let installOssFileMap;
			if (!ossFileMapFlag) installOssFileMap = normalizeCtx(parseCtxFlag(args) ?? await fetchCtxViaInnerApi()).install.ossFileMap;
			await installExtension(tag, resolveOssFileMap({
				ossFileMapFlag,
				installOssFileMap
			}), {
				all,
				// Empty --extension list means "not specified", not "none".
				names: names.length > 0 ? names : void 0,
				homeBase,
				configPath,
				skipConfigUpdate
			});
			console.log(JSON.stringify({ ok: true }));
			break;
		}
		case "download-resource": {
			const tag = getPositionalTag(args, "download-resource");
			if (!tag) {
				console.error("Usage: download-resource <tag> --role=<role> --name=<name> [--dir=<dir>] [--ctx=<base64> | --oss_file_map=<base64>]");
				node_process.default.exit(1);
			}
			const role = getFlag(args, "role");
			const name = getFlag(args, "name");
			const dir = getFlag(args, "dir");
			if (!role || !name) {
				console.error("Usage: download-resource <tag> --role=<role> --name=<name> [--dir=<dir>]");
				node_process.default.exit(1);
			}
			const ossFileMapFlag = getFlag(args, "oss_file_map");
			let installOssFileMap;
			if (!ossFileMapFlag) installOssFileMap = normalizeCtx(parseCtxFlag(args) ?? await fetchCtxViaInnerApi()).install.ossFileMap;
			await downloadResource(tag, resolveOssFileMap({
				ossFileMapFlag,
				installOssFileMap
			}), {
				role,
				name,
				dir
			});
			console.log(JSON.stringify({ ok: true }));
			break;
		}
		default:
			node_process.default.stderr.write(`Unknown command: ${mode}\n\n`);
			node_process.default.stderr.write(formatTopLevelHelp(helpFlags.expert));
			node_process.default.exit(1);
	}
}
|
|
3012
|
+
// Top-level runner: any rejection is reported on stderr and maps to exit 1.
main().catch((err) => {
	const message = err instanceof Error ? err.message : String(err);
	console.error(`Error: ${message}`);
	node_process.default.exit(1);
});
|
|
1369
3017
|
//#endregion
|