metheus-governance-mcp-cli 0.2.87 → 0.2.89

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information in this diff is provided for informational purposes only and reflects the changes between the two package versions as published in their respective public registries.
package/cli.mjs CHANGED
@@ -12,6 +12,7 @@ import {
12
12
  DEFAULT_LOCAL_AI_CLIENT,
13
13
  resolveLocalAIExecutionModel,
14
14
  resolveGeminiReasoningConfig,
15
+ suggestLocalAIModelDisplayName,
15
16
  SUPPORTED_LOCAL_AI_CLIENTS,
16
17
  normalizeLocalAIClientName,
17
18
  normalizeLocalAIPermissionMode,
@@ -856,13 +857,14 @@ function normalizeRunnerRoleProfileName(rawValue) {
856
857
 
857
858
  function normalizeRunnerRoleProfile(profileName, rawProfile, defaults = {}) {
858
859
  const profile = safeObject(rawProfile);
860
+ const normalizedClient = normalizeLocalAIClientName(
861
+ profile.client || profile.cli || defaults.client || DEFAULT_BOT_RUNNER_CLIENT,
862
+ normalizeLocalAIClientName(defaults.client || DEFAULT_BOT_RUNNER_CLIENT),
863
+ );
859
864
  return {
860
865
  name: normalizeRunnerRoleProfileName(profileName || profile.name),
861
- client: normalizeLocalAIClientName(
862
- profile.client || profile.cli || defaults.client || DEFAULT_BOT_RUNNER_CLIENT,
863
- normalizeLocalAIClientName(defaults.client || DEFAULT_BOT_RUNNER_CLIENT),
864
- ),
865
- model: String(profile.model || defaults.model || "").trim(),
866
+ client: normalizedClient,
867
+ model: suggestLocalAIModelDisplayName(normalizedClient, profile.model || defaults.model || ""),
866
868
  permissionMode: normalizeLocalAIPermissionMode(
867
869
  profile.permission_mode
868
870
  || profile.permissionMode
@@ -968,6 +970,7 @@ function serializeRunnerRoleProfile(profile) {
968
970
  function normalizeRunnerBotBinding(bindingName, rawBinding, defaults = {}) {
969
971
  const binding = safeObject(rawBinding);
970
972
  const normalizedName = String(bindingName || binding.name || "").trim();
973
+ const normalizedClient = normalizeLocalAIClientName(binding.client || defaults.client || "", "");
971
974
  let botID = String(binding.bot_id || binding.botID || defaults.botID || "").trim();
972
975
  let botName = String(binding.bot_name || binding.botName || defaults.botName || "").trim();
973
976
  if (!botID && !botName && normalizedName) {
@@ -984,8 +987,8 @@ function normalizeRunnerBotBinding(bindingName, rawBinding, defaults = {}) {
984
987
  roleProfile: normalizeRunnerRoleProfileName(
985
988
  binding.role_profile || binding.roleProfile || binding.execution_profile || binding.executionProfile || defaults.roleProfile,
986
989
  ),
987
- client: normalizeLocalAIClientName(binding.client || defaults.client || "", ""),
988
- model: String(binding.model || defaults.model || "").trim(),
990
+ client: normalizedClient,
991
+ model: suggestLocalAIModelDisplayName(normalizedClient, binding.model || defaults.model || ""),
989
992
  permissionMode: normalizeLocalAIPermissionMode(
990
993
  binding.permission_mode || binding.permissionMode || defaults.permissionMode || "",
991
994
  "",
@@ -3359,6 +3362,7 @@ function buildBotCommandDeps() {
3359
3362
  normalizeLocalAIClientName,
3360
3363
  normalizeLocalAIPermissionMode,
3361
3364
  normalizeLocalAIReasoningEffort,
3365
+ suggestLocalAIModelDisplayName,
3362
3366
  resolveGeminiReasoningConfig,
3363
3367
  supportedLocalAIClients: SUPPORTED_LOCAL_AI_CLIENTS,
3364
3368
  summarizeProviderSupport,
@@ -5211,6 +5215,7 @@ TELEGRAM_BOT_REVIEW_TOKEN=review-token
5211
5215
  cliPath: fileURLToPath(import.meta.url),
5212
5216
  parseSimpleEnvText,
5213
5217
  resolveLocalAIExecutionModel,
5218
+ suggestLocalAIModelDisplayName,
5214
5219
  resolveGeminiReasoningConfig,
5215
5220
  });
5216
5221
 
@@ -58,6 +58,14 @@ function uniqueNormalizedValues(values, deps, fallback = "") {
58
58
  return fallbackKey ? [fallbackKey] : [];
59
59
  }
60
60
 
61
+ function suggestDisplayedAIModel(deps, clientName, rawModelValue = "") {
62
+ const helper = deps?.suggestLocalAIModelDisplayName;
63
+ if (typeof helper === "function") {
64
+ return String(helper(clientName, rawModelValue)).trim();
65
+ }
66
+ return String(rawModelValue || "").trim();
67
+ }
68
+
61
69
  function normalizeServerBotIdentityText(rawValue) {
62
70
  return String(rawValue || "").trim().replace(/^@+/, "").toLowerCase();
63
71
  }
@@ -633,21 +641,22 @@ function displayLocalAIClientName(clientName) {
633
641
  return String(clientName || "").trim();
634
642
  }
635
643
 
636
- function formatRuntimeReasoningField(clientName, reasoningEffort) {
644
+ function formatRuntimeReasoningField(clientName, reasoningEffort, style = "equals") {
637
645
  const normalizedClient = String(clientName || "").trim().toLowerCase();
638
646
  const normalizedEffort = String(reasoningEffort || "").trim();
647
+ const separator = style === "colon" ? ":" : "=";
639
648
  if (!normalizedEffort) {
640
- if (normalizedClient === "claude") return "thinking=(blank)";
641
- if (normalizedClient === "gemini") return "thinking=(blank)";
642
- return "reasoning=(blank)";
649
+ if (normalizedClient === "claude") return `thinking${separator}(blank)`;
650
+ if (normalizedClient === "gemini") return `thinking${separator}(blank)`;
651
+ return `reasoning${separator}(blank)`;
643
652
  }
644
653
  if (normalizedClient === "claude") {
645
- return `thinking=${normalizedEffort} (Claude effort)`;
654
+ return `thinking${separator}${normalizedEffort} (Claude effort)`;
646
655
  }
647
656
  if (normalizedClient === "gemini") {
648
- return `thinking=${normalizedEffort} (Gemini level)`;
657
+ return `thinking${separator}${normalizedEffort} (Gemini level)`;
649
658
  }
650
- return `reasoning=${normalizedEffort}`;
659
+ return `reasoning${separator}${normalizedEffort}`;
651
660
  }
652
661
 
653
662
  function telegramEntryDisplayDescription(entry) {
@@ -739,9 +748,24 @@ async function editTelegramBotGuided(ui, parsed, selected, current, flags, deps)
739
748
  let groupedServerName = "";
740
749
  let serverManagedIdentity = Boolean(current.serverBotID || current.__preferServerIdentity);
741
750
  if (!serverManagedIdentity) {
742
- const initialServerBot = await autoResolveTelegramServerBot(current, flags, deps);
743
- if (String(initialServerBot.botID || "").trim() || initialServerBot.matchMode === "group") {
751
+ const initialBinding = await resolveTelegramServerBindingDetails(
752
+ {
753
+ serverBotID: current.serverBotID,
754
+ botUsername: current.username,
755
+ botKey: current.key,
756
+ roleProfile: current.roleProfile,
757
+ },
758
+ flags,
759
+ deps,
760
+ );
761
+ if (initialBinding.ok && (String(initialBinding.serverBotID || "").trim() || initialBinding.mode === "group")) {
744
762
  serverManagedIdentity = true;
763
+ current.__preferServerIdentity = true;
764
+ if (initialBinding.mode === "single") {
765
+ current.serverBotID = String(initialBinding.serverBotID || "").trim();
766
+ current.roleProfile = String(initialBinding.role || current.roleProfile || "").trim();
767
+ applyRoleProfileDefaults(current, resolveRoleProfileDefaults(current.roleProfile, deps));
768
+ }
745
769
  }
746
770
  }
747
771
  let usernameAction = "keep";
@@ -1568,9 +1592,10 @@ function resolveRoleProfileDefaults(roleProfileName, deps) {
1568
1592
  }
1569
1593
  const config = requireDependency(deps, "loadBotRunnerConfig")({ persistIfNeeded: true });
1570
1594
  const profile = safeObject(safeObject(config.roleProfiles || {})[profileName]);
1595
+ const normalizedClient = requireDependency(deps, "normalizeLocalAIClientName")(profile.client || "", "");
1571
1596
  return {
1572
- client: requireDependency(deps, "normalizeLocalAIClientName")(profile.client || "", ""),
1573
- model: String(profile.model || "").trim(),
1597
+ client: normalizedClient,
1598
+ model: suggestDisplayedAIModel(deps, normalizedClient, profile.model || ""),
1574
1599
  permissionMode: requireDependency(deps, "normalizeLocalAIPermissionMode")(
1575
1600
  profile.permission_mode || profile.permissionMode || "",
1576
1601
  "",
@@ -1618,10 +1643,11 @@ function currentRoleProfileState(roleName, deps) {
1618
1643
  const defaults = resolveRoleProfileDefaults(normalizedRole, deps);
1619
1644
  const config = requireDependency(deps, "loadBotRunnerConfig")({ persistIfNeeded: true });
1620
1645
  const profile = safeObject(safeObject(config.roleProfiles || {})[normalizedRole]);
1646
+ const normalizedClient = requireDependency(deps, "normalizeLocalAIClientName")(profile.client || defaults.client || "", "");
1621
1647
  return {
1622
1648
  role: normalizedRole,
1623
- client: requireDependency(deps, "normalizeLocalAIClientName")(profile.client || defaults.client || "", ""),
1624
- model: String(profile.model || defaults.model || "").trim(),
1649
+ client: normalizedClient,
1650
+ model: suggestDisplayedAIModel(deps, normalizedClient, profile.model || defaults.model || ""),
1625
1651
  permissionMode: requireDependency(deps, "normalizeLocalAIPermissionMode")(
1626
1652
  profile.permission_mode || profile.permissionMode || defaults.permissionMode || "",
1627
1653
  "",
@@ -1639,7 +1665,7 @@ function formatRoleProfileSummary(profile) {
1639
1665
  current.client ? `client:${displayLocalAIClientName(current.client)}` : "client:(blank)",
1640
1666
  current.model ? `model:${current.model}` : "model:(blank)",
1641
1667
  current.permissionMode ? `permission:${current.permissionMode}` : "permission:(blank)",
1642
- current.reasoningEffort ? `reasoning:${current.reasoningEffort}` : "reasoning:(blank)",
1668
+ formatRuntimeReasoningField(current.client, current.reasoningEffort, "colon"),
1643
1669
  ].join(" | ");
1644
1670
  }
1645
1671
 
@@ -256,15 +256,33 @@ function localAIModelMappingsForClient(clientName) {
256
256
  return Array.isArray(LOCAL_AI_MODEL_MAPPINGS[normalizedClient]) ? LOCAL_AI_MODEL_MAPPINGS[normalizedClient] : [];
257
257
  }
258
258
 
259
- export function resolveLocalAIExecutionModel(clientName, rawModelValue = "") {
259
+ function findLocalAIModelMapping(clientName, rawModelValue = "") {
260
260
  const modelValue = String(rawModelValue || "").trim();
261
- if (!modelValue) return "";
261
+ if (!modelValue) return null;
262
262
  const normalizedModel = normalizeModelAliasText(modelValue);
263
- const match = localAIModelMappingsForClient(clientName).find((item) => (
263
+ return localAIModelMappingsForClient(clientName).find((item) => (
264
264
  normalizeModelAliasText(item.display) === normalizedModel
265
265
  || normalizeModelAliasText(item.execution) === normalizedModel
266
266
  || ensureArray(item.aliases).some((alias) => normalizeModelAliasText(alias) === normalizedModel)
267
- ));
267
+ )) || null;
268
+ }
269
+
270
+ export function suggestLocalAIModelDisplayName(clientName, rawModelValue = "") {
271
+ const modelValue = String(rawModelValue || "").trim();
272
+ const match = findLocalAIModelMapping(clientName, modelValue);
273
+ if (match) {
274
+ return String(match.display || modelValue).trim();
275
+ }
276
+ if (modelValue) {
277
+ return modelValue;
278
+ }
279
+ return String(localAIModelMappingsForClient(clientName)[0]?.display || "").trim();
280
+ }
281
+
282
+ export function resolveLocalAIExecutionModel(clientName, rawModelValue = "") {
283
+ const modelValue = String(rawModelValue || "").trim();
284
+ if (!modelValue) return "";
285
+ const match = findLocalAIModelMapping(clientName, modelValue);
268
286
  return match ? String(match.execution || "").trim() : modelValue;
269
287
  }
270
288
 
@@ -229,6 +229,7 @@ export async function runSelftestBotCommands(push, deps) {
229
229
  const cliPath = String(requireDependency(deps, "cliPath") || "").trim();
230
230
  const parseSimpleEnvText = requireDependency(deps, "parseSimpleEnvText");
231
231
  const resolveLocalAIExecutionModel = requireDependency(deps, "resolveLocalAIExecutionModel");
232
+ const suggestLocalAIModelDisplayName = requireDependency(deps, "suggestLocalAIModelDisplayName");
232
233
  const resolveGeminiReasoningConfig = requireDependency(deps, "resolveGeminiReasoningConfig");
233
234
  let tempHome = "";
234
235
  let mock = null;
@@ -254,6 +255,18 @@ export async function runSelftestBotCommands(push, deps) {
254
255
  ].join(" "),
255
256
  );
256
257
 
258
+ push(
259
+ "blank_model_defaults_to_first_display_model_for_each_client",
260
+ suggestLocalAIModelDisplayName("codex", "") === "gpt-5.4"
261
+ && suggestLocalAIModelDisplayName("claude", "") === "Sonnet 4.6r"
262
+ && suggestLocalAIModelDisplayName("gemini", "") === "gemini-3.1-pro",
263
+ [
264
+ `codex=${suggestLocalAIModelDisplayName("codex", "")}`,
265
+ `claude=${suggestLocalAIModelDisplayName("claude", "")}`,
266
+ `gemini=${suggestLocalAIModelDisplayName("gemini", "")}`,
267
+ ].join(" "),
268
+ );
269
+
257
270
  const geminiLowReasoning = resolveGeminiReasoningConfig("gemini-3.1-pro", "low");
258
271
  const geminiMediumReasoning = resolveGeminiReasoningConfig("gemini-3.1-pro", "medium");
259
272
  const geminiHighReasoning = resolveGeminiReasoningConfig("gemini-3.1-pro", "high");
@@ -403,7 +416,7 @@ export async function runSelftestBotCommands(push, deps) {
403
416
  "3", // select role to edit: worker
404
417
  "2", // worker: edit settings
405
418
  "3", // worker AI client: claude
406
- "2", // worker AI model: Sonnet 4.6r
419
+ "3", // worker AI model: Sonnet 4.6r
407
420
  "4", // worker permission: danger_full_access
408
421
  "4", // worker reasoning: high
409
422
  "y", // edit another role
@@ -453,8 +466,9 @@ export async function runSelftestBotCommands(push, deps) {
453
466
  "bot_show_reports_grouped_server_roles",
454
467
  safeObject(groupedShowPayload.serverBinding).mode === "group"
455
468
  && safeObject(safeObject(groupedShowPayload.serverBinding).effectiveRoleProfiles).worker?.client === "claude"
456
- && safeObject(safeObject(groupedShowPayload.serverBinding).effectiveRoleProfiles).approval?.client === "gemini",
457
- `mode=${String(safeObject(groupedShowPayload.serverBinding).mode || "")} worker=${String(safeObject(safeObject(groupedShowPayload.serverBinding).effectiveRoleProfiles).worker?.client || "")} approval=${String(safeObject(safeObject(groupedShowPayload.serverBinding).effectiveRoleProfiles).approval?.client || "")}`,
469
+ && safeObject(safeObject(groupedShowPayload.serverBinding).effectiveRoleProfiles).approval?.client === "gemini"
470
+ && String(safeObject(safeObject(groupedShowPayload.serverBinding).effectiveRoleProfiles).monitor?.model || "") === "gpt-5.4",
471
+ `mode=${String(safeObject(groupedShowPayload.serverBinding).mode || "")} worker=${String(safeObject(safeObject(groupedShowPayload.serverBinding).effectiveRoleProfiles).worker?.client || "")} approval=${String(safeObject(safeObject(groupedShowPayload.serverBinding).effectiveRoleProfiles).approval?.client || "")} monitor_model=${String(safeObject(safeObject(groupedShowPayload.serverBinding).effectiveRoleProfiles).monitor?.model || "")}`,
458
472
  );
459
473
  } finally {
460
474
  await groupedMock.close();
@@ -542,7 +556,7 @@ export async function runSelftestBotCommands(push, deps) {
542
556
  "2", // change AI client
543
557
  "4", // gemini
544
558
  "2", // change AI model
545
- "2", // gemini model: gemini-3.1-pro
559
+ "3", // gemini model: gemini-3.1-pro
546
560
  "2", // change permission mode
547
561
  "3", // workspace_write
548
562
  "2", // change reasoning effort
@@ -326,7 +326,7 @@ export async function runSelftestRunnerScenarios(push, deps) {
326
326
  envBindingExecutionPlan.mode === "role_profile"
327
327
  && envBindingExecutionPlan.roleProfileName === "approval"
328
328
  && envBindingExecutionPlan.roleProfile?.client === "claude"
329
- && envBindingExecutionPlan.roleProfile?.model === "sonnet"
329
+ && envBindingExecutionPlan.roleProfile?.model === "Sonnet 4.6r"
330
330
  && envBindingExecutionPlan.roleProfile?.reasoningEffort === "medium",
331
331
  `role_profile=${envBindingExecutionPlan.roleProfileName} client=${String(envBindingExecutionPlan.roleProfile?.client || "(none)")} model=${String(envBindingExecutionPlan.roleProfile?.model || "(none)")}`,
332
332
  );
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "metheus-governance-mcp-cli",
3
- "version": "0.2.87",
3
+ "version": "0.2.89",
4
4
  "description": "Metheus Governance MCP CLI (setup + stdio proxy)",
5
5
  "type": "module",
6
6
  "files": [