@tenex-chat/backend 0.9.4 → 0.9.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (148)
  1. package/README.md +5 -1
  2. package/dist/daemon-wrapper.cjs +47 -0
  3. package/dist/index.js +59268 -0
  4. package/dist/wrapper.js +171 -0
  5. package/package.json +19 -27
  6. package/src/agents/AgentRegistry.ts +9 -7
  7. package/src/agents/AgentStorage.ts +24 -1
  8. package/src/agents/agent-installer.ts +6 -0
  9. package/src/agents/agent-loader.ts +7 -2
  10. package/src/agents/constants.ts +10 -2
  11. package/src/agents/execution/AgentExecutor.ts +35 -6
  12. package/src/agents/execution/StreamCallbacks.ts +53 -13
  13. package/src/agents/execution/StreamExecutionHandler.ts +110 -16
  14. package/src/agents/execution/StreamSetup.ts +19 -9
  15. package/src/agents/execution/ToolEventHandlers.ts +112 -0
  16. package/src/agents/role-categories.ts +53 -0
  17. package/src/agents/types/runtime.ts +7 -0
  18. package/src/agents/types/storage.ts +7 -0
  19. package/src/commands/agent/import/openclaw-distiller.ts +63 -7
  20. package/src/commands/agent/import/openclaw-reader.ts +54 -0
  21. package/src/commands/agent/import/openclaw.ts +120 -29
  22. package/src/commands/agent/index.ts +83 -2
  23. package/src/commands/setup/display.ts +123 -0
  24. package/src/commands/setup/embed.ts +13 -13
  25. package/src/commands/setup/global-system-prompt.ts +15 -17
  26. package/src/commands/setup/image.ts +17 -20
  27. package/src/commands/setup/interactive.ts +37 -20
  28. package/src/commands/setup/llm.ts +12 -7
  29. package/src/commands/setup/onboarding.ts +1580 -248
  30. package/src/commands/setup/providers.ts +3 -3
  31. package/src/conversations/ConversationStore.ts +23 -2
  32. package/src/conversations/MessageBuilder.ts +51 -73
  33. package/src/conversations/formatters/utils/conversation-transcript-formatter.ts +425 -0
  34. package/src/conversations/search/embeddings/ConversationEmbeddingService.ts +40 -98
  35. package/src/conversations/search/embeddings/ConversationIndexingJob.ts +40 -52
  36. package/src/conversations/services/ConversationSummarizer.ts +1 -2
  37. package/src/conversations/types.ts +11 -0
  38. package/src/daemon/Daemon.ts +78 -57
  39. package/src/daemon/ProjectRuntime.ts +6 -12
  40. package/src/daemon/SubscriptionManager.ts +13 -0
  41. package/src/daemon/index.ts +0 -1
  42. package/src/event-handler/index.ts +1 -0
  43. package/src/index.ts +20 -1
  44. package/src/llm/ChunkHandler.ts +1 -1
  45. package/src/llm/FinishHandler.ts +28 -4
  46. package/src/llm/LLMConfigEditor.ts +218 -106
  47. package/src/llm/index.ts +0 -4
  48. package/src/llm/meta/MetaModelResolver.ts +3 -18
  49. package/src/llm/middleware/message-sanitizer.ts +153 -0
  50. package/src/llm/providers/ollama-models.ts +0 -38
  51. package/src/llm/service.ts +50 -15
  52. package/src/llm/types.ts +0 -12
  53. package/src/llm/utils/ConfigurationManager.ts +88 -465
  54. package/src/llm/utils/ConfigurationTester.ts +42 -185
  55. package/src/llm/utils/ModelSelector.ts +156 -92
  56. package/src/llm/utils/ProviderConfigUI.ts +10 -141
  57. package/src/llm/utils/models-dev-cache.ts +102 -23
  58. package/src/llm/utils/provider-select-prompt.ts +284 -0
  59. package/src/llm/utils/provider-setup.ts +81 -34
  60. package/src/llm/utils/variant-list-prompt.ts +361 -0
  61. package/src/nostr/AgentEventDecoder.ts +1 -0
  62. package/src/nostr/AgentEventEncoder.ts +37 -0
  63. package/src/nostr/AgentProfilePublisher.ts +13 -0
  64. package/src/nostr/AgentPublisher.ts +26 -0
  65. package/src/nostr/kinds.ts +1 -0
  66. package/src/nostr/ndkClient.ts +4 -1
  67. package/src/nostr/types.ts +12 -0
  68. package/src/prompts/fragments/25-rag-instructions.ts +22 -21
  69. package/src/prompts/fragments/31-agents-md-guidance.ts +7 -21
  70. package/src/prompts/fragments/index.ts +2 -0
  71. package/src/prompts/utils/systemPromptBuilder.ts +18 -28
  72. package/src/services/AgentDefinitionMonitor.ts +8 -0
  73. package/src/services/ConfigService.ts +34 -0
  74. package/src/services/PubkeyService.ts +7 -1
  75. package/src/services/compression/CompressionService.ts +133 -74
  76. package/src/services/compression/compression-utils.ts +110 -19
  77. package/src/services/config/types.ts +0 -6
  78. package/src/services/dispatch/AgentDispatchService.ts +79 -0
  79. package/src/services/intervention/InterventionService.ts +78 -5
  80. package/src/services/nip46/Nip46SigningService.ts +30 -1
  81. package/src/services/projects/ProjectContext.ts +8 -6
  82. package/src/services/rag/RAGCollectionRegistry.ts +199 -0
  83. package/src/services/rag/RAGDatabaseService.ts +2 -7
  84. package/src/services/rag/RAGOperations.ts +25 -45
  85. package/src/services/rag/RAGService.ts +0 -31
  86. package/src/services/rag/RagSubscriptionService.ts +71 -122
  87. package/src/services/rag/rag-utils.ts +13 -0
  88. package/src/services/ral/RALRegistry.ts +25 -184
  89. package/src/services/reports/ReportEmbeddingService.ts +63 -113
  90. package/src/services/search/UnifiedSearchService.ts +115 -4
  91. package/src/services/search/index.ts +1 -0
  92. package/src/services/search/projectFilter.ts +20 -4
  93. package/src/services/search/providers/ConversationSearchProvider.ts +1 -0
  94. package/src/services/search/providers/GenericCollectionSearchProvider.ts +81 -0
  95. package/src/services/search/providers/LessonSearchProvider.ts +1 -8
  96. package/src/services/search/providers/ReportSearchProvider.ts +1 -0
  97. package/src/services/search/types.ts +24 -3
  98. package/src/services/trust-pubkeys/SystemPubkeyListService.ts +148 -0
  99. package/src/services/trust-pubkeys/TrustPubkeyService.ts +70 -9
  100. package/src/telemetry/setup.ts +2 -13
  101. package/src/tools/implementations/ask.ts +3 -3
  102. package/src/tools/implementations/conversation_get.ts +28 -268
  103. package/src/tools/implementations/fs_grep.ts +6 -6
  104. package/src/tools/implementations/fs_read.ts +2 -0
  105. package/src/tools/implementations/fs_write.ts +2 -0
  106. package/src/tools/implementations/learn.ts +38 -50
  107. package/src/tools/implementations/rag_add_documents.ts +6 -4
  108. package/src/tools/implementations/rag_create_collection.ts +37 -4
  109. package/src/tools/implementations/rag_delete_collection.ts +9 -0
  110. package/src/tools/implementations/{search.ts → rag_search.ts} +31 -25
  111. package/src/tools/registry.ts +7 -8
  112. package/src/tools/types.ts +11 -2
  113. package/src/tools/utils/transcript-args.ts +13 -0
  114. package/src/utils/cli-theme.ts +13 -0
  115. package/src/utils/logger.ts +55 -0
  116. package/src/utils/metadataKeys.ts +17 -0
  117. package/src/utils/sqlEscaping.ts +39 -0
  118. package/src/wrapper.ts +7 -3
  119. package/dist/src/index.js +0 -46778
  120. package/dist/tenex-backend-wrapper.cjs +0 -3
  121. package/src/agents/execution/constants.ts +0 -16
  122. package/src/agents/execution/index.ts +0 -3
  123. package/src/agents/index.ts +0 -4
  124. package/src/commands/agent.ts +0 -215
  125. package/src/conversations/formatters/DelegationXmlFormatter.ts +0 -64
  126. package/src/conversations/formatters/index.ts +0 -9
  127. package/src/conversations/index.ts +0 -2
  128. package/src/conversations/utils/content-utils.ts +0 -69
  129. package/src/daemon/UnixSocketTransport.ts +0 -318
  130. package/src/event-handler/newConversation.ts +0 -165
  131. package/src/events/NDKProjectStatus.ts +0 -384
  132. package/src/events/index.ts +0 -4
  133. package/src/lib/json-parser.ts +0 -30
  134. package/src/llm/RecordingState.ts +0 -37
  135. package/src/llm/StreamPublisher.ts +0 -40
  136. package/src/llm/middleware/flight-recorder.ts +0 -188
  137. package/src/llm/utils/claudeCodePromptCompiler.ts +0 -141
  138. package/src/nostr/constants.ts +0 -38
  139. package/src/prompts/core/index.ts +0 -3
  140. package/src/prompts/index.ts +0 -21
  141. package/src/services/image/index.ts +0 -12
  142. package/src/services/status/index.ts +0 -11
  143. package/src/telemetry/diagnostics.ts +0 -27
  144. package/src/tools/implementations/rag_query.ts +0 -107
  145. package/src/types/index.ts +0 -46
  146. package/src/utils/agentFetcher.ts +0 -107
  147. package/src/utils/conversation-utils.ts +0 -1
  148. package/src/utils/process.ts +0 -49
@@ -1,4 +1,4 @@
1
- import type { LLMConfiguration, MetaModelConfiguration, MetaModelVariant, TenexLLMs } from "@/services/config/types";
1
+ import type { LLMConfiguration, TenexLLMs } from "@/services/config/types";
2
2
  import { hasApiKey } from "@/llm/providers/key-manager";
3
3
 
4
4
  /**
@@ -9,25 +9,29 @@ type TenexLLMsWithProviders = TenexLLMs & {
9
9
  };
10
10
  import chalk from "chalk";
11
11
  import inquirer from "inquirer";
12
+ import { inquirerTheme } from "@/utils/cli-theme";
13
+ import * as display from "@/commands/setup/display";
12
14
  import { PROVIDER_IDS } from "@/llm/providers/provider-ids";
13
15
  import type { AISdkProvider } from "../types";
14
16
  import { ModelSelector } from "./ModelSelector";
15
17
  import { ProviderConfigUI } from "./ProviderConfigUI";
16
- import { listCodexModels, formatCodexModel } from "./codex-models";
18
+ import { listCodexModels } from "./codex-models";
19
+ import { variantListPrompt } from "./variant-list-prompt";
17
20
 
18
21
  /**
19
22
  * Manages LLM configuration CRUD operations
20
23
  */
21
24
  export class ConfigurationManager {
22
- static async add(llmsConfig: TenexLLMsWithProviders, isFirstConfig = false): Promise<void> {
25
+ static async add(llmsConfig: TenexLLMsWithProviders, advanced = false): Promise<void> {
23
26
  const configuredProviders = Object.keys(llmsConfig.providers).filter(
24
- (p) => hasApiKey(llmsConfig.providers[p]?.apiKey)
27
+ (p) => {
28
+ const key = llmsConfig.providers[p]?.apiKey;
29
+ return hasApiKey(key) || key === "none";
30
+ }
25
31
  );
26
32
 
27
33
  if (configuredProviders.length === 0) {
28
- console.log(
29
- chalk.yellow("⚠️ No providers configured. Please configure API keys first.")
30
- );
34
+ display.hint("No providers configured. Please configure API keys first.");
31
35
  return;
32
36
  }
33
37
 
@@ -41,11 +45,13 @@ export class ConfigurationManager {
41
45
  name: ProviderConfigUI.getProviderDisplayName(p),
42
46
  value: p,
43
47
  })),
48
+ theme: inquirerTheme,
44
49
  },
45
50
  ]);
46
51
 
47
52
  // Select model based on provider
48
53
  let model: string;
54
+ let modelDisplayName: string | undefined;
49
55
  let reasoningEffort: string | undefined;
50
56
 
51
57
  if (provider === "openrouter") {
@@ -53,68 +59,71 @@ export class ConfigurationManager {
53
59
  } else if (provider === "ollama") {
54
60
  model = await ModelSelector.selectOllamaModel();
55
61
  } else if (provider === PROVIDER_IDS.CODEX_APP_SERVER) {
56
- // For Codex, show available models with reasoning efforts
57
62
  const result = await ConfigurationManager.selectCodexModel();
58
63
  model = result.model;
59
64
  reasoningEffort = result.reasoningEffort;
60
65
  } else {
61
- const { inputModel } = await inquirer.prompt([
66
+ // Use models.dev list for Anthropic, OpenAI, and any other provider with data
67
+ const result = await ModelSelector.selectModelsDevModel(
68
+ provider,
69
+ ConfigurationManager.getDefaultModelForProvider(provider as AISdkProvider)
70
+ );
71
+ model = result.id;
72
+ modelDisplayName = result.name;
73
+ }
74
+
75
+ // Advanced settings (temperature, maxTokens) only when --advanced
76
+ let temperature: string | undefined;
77
+ let maxTokens: string | undefined;
78
+
79
+ if (advanced) {
80
+ const advancedAnswers = await inquirer.prompt([
81
+ {
82
+ type: "input",
83
+ name: "temperature",
84
+ message: "Temperature (0-2, press enter to skip):",
85
+ validate: (input: string) => {
86
+ if (!input) return true;
87
+ const num = Number.parseFloat(input);
88
+ if (Number.isNaN(num) || num < 0 || num > 2)
89
+ return "Temperature must be between 0 and 2";
90
+ return true;
91
+ },
92
+ theme: inquirerTheme,
93
+ },
62
94
  {
63
95
  type: "input",
64
- name: "inputModel",
65
- message: "Enter model name:",
66
- default: ConfigurationManager.getDefaultModelForProvider(
67
- provider as AISdkProvider
68
- ),
96
+ name: "maxTokens",
97
+ message: "Max tokens (press enter to skip):",
69
98
  validate: (input: string) => {
70
- if (!input.trim()) return "Model name is required";
99
+ if (!input) return true;
100
+ const num = Number.parseInt(input);
101
+ if (Number.isNaN(num) || num <= 0)
102
+ return "Max tokens must be a positive number";
71
103
  return true;
72
104
  },
105
+ theme: inquirerTheme,
73
106
  },
74
107
  ]);
75
- model = inputModel;
108
+ temperature = advancedAnswers.temperature;
109
+ maxTokens = advancedAnswers.maxTokens;
76
110
  }
77
111
 
78
- // Optional settings
79
- const { temperature, maxTokens } = await inquirer.prompt([
80
- {
81
- type: "input",
82
- name: "temperature",
83
- message: "Temperature (0-2, press enter to skip):",
84
- validate: (input: string) => {
85
- if (!input) return true;
86
- const num = Number.parseFloat(input);
87
- if (Number.isNaN(num) || num < 0 || num > 2)
88
- return "Temperature must be between 0 and 2";
89
- return true;
90
- },
91
- },
92
- {
93
- type: "input",
94
- name: "maxTokens",
95
- message: "Max tokens (press enter to skip):",
96
- validate: (input: string) => {
97
- if (!input) return true;
98
- const num = Number.parseInt(input);
99
- if (Number.isNaN(num) || num <= 0)
100
- return "Max tokens must be a positive number";
101
- return true;
102
- },
103
- },
104
- ]);
112
+ // Default config name to provider/humanName (e.g. "anthropic/Claude Sonnet 4.6")
113
+ const defaultName = `${provider}/${modelDisplayName || model}`;
105
114
 
106
- // Name the configuration
107
115
  const { name } = await inquirer.prompt([
108
116
  {
109
117
  type: "input",
110
118
  name: "name",
111
119
  message: "Configuration name:",
112
- default: isFirstConfig ? "default" : undefined,
120
+ default: defaultName,
113
121
  validate: (input: string) => {
114
122
  if (!input.trim()) return "Name is required";
115
123
  if (llmsConfig.configurations[input]) return "Configuration already exists";
116
124
  return true;
117
125
  },
126
+ theme: inquirerTheme,
118
127
  },
119
128
  ]);
120
129
 
@@ -130,480 +139,93 @@ export class ConfigurationManager {
130
139
 
131
140
  llmsConfig.configurations[name] = config;
132
141
 
133
- // Set as default if first or ask user
134
- if (isFirstConfig || !llmsConfig.default) {
142
+ // Auto-set as default if first config or no default exists
143
+ if (!llmsConfig.default || Object.keys(llmsConfig.configurations).length === 1) {
135
144
  llmsConfig.default = name;
136
- console.log(chalk.green(`✅ Configuration "${name}" created and set as default`));
145
+ display.success(`Configuration "${name}" created and set as default`);
137
146
  } else {
138
- const { setAsDefault } = await inquirer.prompt([
139
- {
140
- type: "confirm",
141
- name: "setAsDefault",
142
- message: "Set as default configuration?",
143
- default: false,
144
- },
145
- ]);
146
-
147
- if (setAsDefault) {
148
- llmsConfig.default = name;
149
- }
150
- console.log(chalk.green(`✅ Configuration "${name}" created`));
147
+ display.success(`Configuration "${name}" created`);
151
148
  }
152
149
  }
153
150
 
154
151
  /**
155
- * Create a meta model configuration with multiple variants
152
+ * Create a multi-modal configuration with multiple variants
156
153
  */
157
- static async addMetaModel(llmsConfig: TenexLLMsWithProviders): Promise<void> {
158
- // Get existing non-meta configurations to use as variants
154
+ static async addMultiModal(llmsConfig: TenexLLMsWithProviders): Promise<void> {
159
155
  const standardConfigs = Object.keys(llmsConfig.configurations).filter((name) => {
160
156
  const config = llmsConfig.configurations[name];
161
157
  return config.provider !== "meta";
162
158
  });
163
159
 
164
160
  if (standardConfigs.length < 2) {
165
- console.log(
166
- chalk.yellow(
167
- "⚠️ You need at least 2 standard LLM configurations to create a meta model."
168
- )
169
- );
170
- console.log(
171
- chalk.gray(" Create more configurations first with 'Add new configuration'.")
172
- );
161
+ display.hint("You need at least 2 standard LLM configurations to create a multi-modal configuration.");
162
+ display.context("Create more configurations first with 'Add new configuration'.");
173
163
  return;
174
164
  }
175
165
 
176
- console.log(chalk.cyan("\n=== Create Meta Model ===\n"));
177
- console.log(
178
- chalk.gray(
179
- "Meta models let you switch between different models using keywords.\n" +
180
- "For example, starting a message with 'ultrathink' can trigger a more powerful model.\n"
181
- )
166
+ display.blank();
167
+ display.step(0, 0, "Add Multi-Modal Configuration");
168
+ display.context(
169
+ "Multi-modal configurations let you switch between different models using keywords.\n" +
170
+ "For example, starting a message with 'ultrathink' can trigger a more powerful model."
182
171
  );
172
+ display.blank();
183
173
 
184
- // Get meta model name
185
174
  const { metaName } = await inquirer.prompt([
186
175
  {
187
176
  type: "input",
188
177
  name: "metaName",
189
- message: "Meta model name:",
178
+ message: "Multi-modal configuration name:",
190
179
  validate: (input: string) => {
191
180
  if (!input.trim()) return "Name is required";
192
181
  if (llmsConfig.configurations[input]) return "Configuration already exists";
193
182
  return true;
194
183
  },
184
+ theme: inquirerTheme,
195
185
  },
196
186
  ]);
197
187
 
198
- // Get optional description
199
- const { description } = await inquirer.prompt([
200
- {
201
- type: "input",
202
- name: "description",
203
- message: "Description (shown in system prompt, press enter to skip):",
204
- },
205
- ]);
206
-
207
- // Create variants
208
- const variants: Record<string, MetaModelVariant> = {};
209
- let addMoreVariants = true;
210
- let variantCount = 0;
211
-
212
- console.log(chalk.cyan("\nNow let's add variants to your meta model.\n"));
213
-
214
- while (addMoreVariants) {
215
- variantCount++;
216
- console.log(chalk.bold(`\n--- Variant ${variantCount} ---`));
217
-
218
- // Variant name
219
- const { variantName } = await inquirer.prompt([
220
- {
221
- type: "input",
222
- name: "variantName",
223
- message: "Variant name (e.g., 'fast', 'standard', 'deep'):",
224
- validate: (input: string) => {
225
- if (!input.trim()) return "Name is required";
226
- if (variants[input]) return "Variant already exists";
227
- return true;
228
- },
229
- },
230
- ]);
231
-
232
- // Select underlying model
233
- const { model } = await inquirer.prompt([
234
- {
235
- type: "select",
236
- name: "model",
237
- message: "Select underlying model for this variant:",
238
- choices: standardConfigs.map((n) => ({
239
- name: n,
240
- value: n,
241
- })),
242
- },
243
- ]);
244
-
245
- // Keywords (comma-separated)
246
- const { keywordsInput } = await inquirer.prompt([
247
- {
248
- type: "input",
249
- name: "keywordsInput",
250
- message: "Trigger keywords (comma-separated, e.g., 'think,ponder'):",
251
- },
252
- ]);
253
-
254
- const keywords = keywordsInput
255
- ? keywordsInput
256
- .split(",")
257
- .map((k: string) => k.trim().toLowerCase())
258
- .filter((k: string) => k.length > 0)
259
- : [];
260
-
261
- // Variant description
262
- const { variantDescription } = await inquirer.prompt([
263
- {
264
- type: "input",
265
- name: "variantDescription",
266
- message: "Variant description (shown in system prompt):",
267
- },
268
- ]);
269
-
270
- // Tier (priority)
271
- const { tier } = await inquirer.prompt([
272
- {
273
- type: "number",
274
- name: "tier",
275
- message: "Priority tier (higher = higher priority when keywords conflict):",
276
- default: variantCount,
277
- },
278
- ]);
279
-
280
- // Optional system prompt
281
- const { variantSystemPrompt } = await inquirer.prompt([
282
- {
283
- type: "input",
284
- name: "variantSystemPrompt",
285
- message: "Additional system prompt (press enter to skip):",
286
- },
287
- ]);
288
-
289
- // Create variant
290
- const variant: MetaModelVariant = {
291
- model,
292
- };
293
-
294
- if (keywords.length > 0) variant.keywords = keywords;
295
- if (variantDescription) variant.description = variantDescription;
296
- if (tier) variant.tier = tier;
297
- if (variantSystemPrompt) variant.systemPrompt = variantSystemPrompt;
298
-
299
- variants[variantName] = variant;
300
-
301
- console.log(chalk.green(`✓ Added variant "${variantName}" → ${model}`));
302
-
303
- // Ask if more variants
304
- if (variantCount >= 2) {
305
- const { addMore } = await inquirer.prompt([
306
- {
307
- type: "confirm",
308
- name: "addMore",
309
- message: "Add another variant?",
310
- default: false,
311
- },
312
- ]);
313
- addMoreVariants = addMore;
314
- } else {
315
- console.log(chalk.gray("(Meta models need at least 2 variants)"));
316
- }
317
- }
318
-
319
- // Select default variant
320
- const variantNames = Object.keys(variants);
321
- const { defaultVariant } = await inquirer.prompt([
322
- {
323
- type: "select",
324
- name: "defaultVariant",
325
- message: "Select default variant (used when no keyword matches):",
326
- choices: variantNames.map((n) => ({
327
- name: n,
328
- value: n,
329
- })),
330
- },
331
- ]);
332
-
333
- // Create meta model configuration
334
- const metaConfig: MetaModelConfiguration = {
335
- provider: "meta",
336
- variants,
337
- default: defaultVariant,
338
- };
339
-
340
- if (description) metaConfig.description = description;
188
+ const metaConfig = await variantListPrompt(metaName, standardConfigs);
341
189
 
342
190
  llmsConfig.configurations[metaName] = metaConfig;
343
191
 
344
- // Ask if should be set as default
345
- const { setAsDefault } = await inquirer.prompt([
346
- {
347
- type: "confirm",
348
- name: "setAsDefault",
349
- message: "Set as default configuration?",
350
- default: false,
351
- },
352
- ]);
353
-
354
- if (setAsDefault) {
192
+ if (!llmsConfig.default) {
355
193
  llmsConfig.default = metaName;
356
194
  }
357
195
 
358
- console.log(chalk.green(`\n✅ Meta model "${metaName}" created with ${variantCount} variants`));
359
-
360
- // Show summary
361
- console.log(chalk.cyan("\nVariant summary:"));
362
- for (const [name, variant] of Object.entries(variants)) {
363
- const keywords = variant.keywords?.length ? ` (triggers: ${variant.keywords.join(", ")})` : "";
364
- const isDefault = name === defaultVariant ? chalk.yellow(" [default]") : "";
365
- console.log(chalk.gray(` • ${name} → ${variant.model}${keywords}${isDefault}`));
366
- }
367
- }
368
-
369
- static async delete(llmsConfig: TenexLLMsWithProviders): Promise<void> {
370
- const configNames = Object.keys(llmsConfig.configurations);
371
-
372
- if (configNames.length === 0) {
373
- console.log(chalk.yellow("⚠️ No configurations to delete"));
374
- return;
375
- }
376
-
377
- const { name } = await inquirer.prompt([
378
- {
379
- type: "select",
380
- name: "name",
381
- message: "Select configuration to delete:",
382
- choices: configNames.map((n) => ({
383
- name: n === llmsConfig.default ? `${n} (default)` : n,
384
- value: n,
385
- })),
386
- },
387
- ]);
388
-
389
- const { confirm } = await inquirer.prompt([
390
- {
391
- type: "confirm",
392
- name: "confirm",
393
- message: `Are you sure you want to delete "${name}"?`,
394
- default: false,
395
- },
396
- ]);
397
-
398
- if (confirm) {
399
- delete llmsConfig.configurations[name];
400
-
401
- // Update default if needed
402
- if (llmsConfig.default === name) {
403
- const remaining = Object.keys(llmsConfig.configurations);
404
- llmsConfig.default = remaining.length > 0 ? remaining[0] : undefined;
405
-
406
- if (llmsConfig.default) {
407
- console.log(chalk.yellow(`Default changed to "${llmsConfig.default}"`));
408
- }
409
- }
410
-
411
- console.log(chalk.green(`✅ Configuration "${name}" deleted`));
412
- }
413
- }
414
-
415
- static async setDefault(llmsConfig: TenexLLMsWithProviders): Promise<void> {
416
- const configNames = Object.keys(llmsConfig.configurations);
417
-
418
- if (configNames.length === 0) {
419
- console.log(chalk.yellow("⚠️ No configurations available"));
420
- return;
421
- }
422
-
423
- const { name } = await inquirer.prompt([
424
- {
425
- type: "select",
426
- name: "name",
427
- message: "Select default configuration:",
428
- choices: configNames.map((n) => ({
429
- name: n === llmsConfig.default ? `${n} (current default)` : n,
430
- value: n,
431
- })),
432
- },
433
- ]);
434
-
435
- llmsConfig.default = name;
436
- console.log(chalk.green(`✅ Default configuration set to "${name}"`));
437
- }
438
-
439
- static async setSummarizationModel(llmsConfig: TenexLLMsWithProviders): Promise<void> {
440
- const configNames = Object.keys(llmsConfig.configurations);
441
-
442
- if (configNames.length === 0) {
443
- console.log(chalk.yellow("⚠️ No configurations available"));
444
- return;
445
- }
446
-
447
- const { name } = await inquirer.prompt([
448
- {
449
- type: "select",
450
- name: "name",
451
- message: "Select summarization model:",
452
- choices: configNames.map((n) => ({
453
- name: n === llmsConfig.summarization ? `${n} (current)` : n,
454
- value: n,
455
- })),
456
- },
457
- ]);
458
-
459
- llmsConfig.summarization = name;
460
- console.log(chalk.green(`✅ Summarization model set to "${name}"`));
461
-
462
- // Offer to test the configuration
463
- const { shouldTest } = await inquirer.prompt([
464
- {
465
- type: "confirm",
466
- name: "shouldTest",
467
- message: "Would you like to test this configuration with generateObject?",
468
- default: true,
469
- },
470
- ]);
471
-
472
- if (shouldTest) {
473
- const { ConfigurationTester } = await import("./ConfigurationTester");
474
- await ConfigurationTester.testSummarization(llmsConfig, name);
475
- }
476
- }
477
-
478
- static async setSupervisionModel(llmsConfig: TenexLLMsWithProviders): Promise<void> {
479
- const configNames = Object.keys(llmsConfig.configurations);
480
-
481
- if (configNames.length === 0) {
482
- console.log(chalk.yellow("⚠️ No configurations available"));
483
- return;
484
- }
485
-
486
- const { name } = await inquirer.prompt([
487
- {
488
- type: "select",
489
- name: "name",
490
- message: "Select supervision model:",
491
- choices: configNames.map((n) => ({
492
- name: n === llmsConfig.supervision ? `${n} (current)` : n,
493
- value: n,
494
- })),
495
- },
496
- ]);
497
-
498
- llmsConfig.supervision = name;
499
- console.log(chalk.green(`✅ Supervision model set to "${name}"`));
500
- }
501
-
502
- static async setSearchModel(llmsConfig: TenexLLMsWithProviders): Promise<void> {
503
- const configNames = Object.keys(llmsConfig.configurations);
504
-
505
- if (configNames.length === 0) {
506
- console.log(chalk.yellow("⚠️ No configurations available"));
507
- return;
508
- }
509
-
510
- const { name } = await inquirer.prompt([
511
- {
512
- type: "select",
513
- name: "name",
514
- message: "Select search model:",
515
- choices: configNames.map((n) => ({
516
- name: n === llmsConfig.search ? `${n} (current)` : n,
517
- value: n,
518
- })),
519
- },
520
- ]);
521
-
522
- llmsConfig.search = name;
523
- console.log(chalk.green(`✅ Search model set to "${name}"`));
524
- }
525
-
526
- static async setPromptCompilationModel(llmsConfig: TenexLLMsWithProviders): Promise<void> {
527
- const configNames = Object.keys(llmsConfig.configurations);
528
-
529
- if (configNames.length === 0) {
530
- console.log(chalk.yellow("⚠️ No configurations available"));
531
- return;
532
- }
533
-
534
- const { name } = await inquirer.prompt([
535
- {
536
- type: "select",
537
- name: "name",
538
- message: "Select prompt compilation model:",
539
- choices: configNames.map((n) => ({
540
- name: n === llmsConfig.promptCompilation ? `${n} (current)` : n,
541
- value: n,
542
- })),
543
- },
544
- ]);
545
-
546
- llmsConfig.promptCompilation = name;
547
- console.log(chalk.green(`✅ Prompt compilation model set to "${name}"`));
548
- console.log(chalk.gray(" This model is used to compile lessons into agent system prompts."));
549
- }
550
-
551
- static async setCompressionModel(llmsConfig: TenexLLMsWithProviders): Promise<void> {
552
- const configNames = Object.keys(llmsConfig.configurations);
553
-
554
- if (configNames.length === 0) {
555
- console.log(chalk.yellow("⚠️ No configurations available"));
556
- return;
557
- }
558
-
559
- const { name } = await inquirer.prompt([
560
- {
561
- type: "select",
562
- name: "name",
563
- message: "Select compression model:",
564
- choices: configNames.map((n) => ({
565
- name: n === llmsConfig.compression ? `${n} (current)` : n,
566
- value: n,
567
- })),
568
- },
569
- ]);
570
-
571
- llmsConfig.compression = name;
572
- console.log(chalk.green(`✅ Compression model set to "${name}"`));
573
- console.log(chalk.gray(" This model is used for conversation history compression."));
196
+ const variantCount = Object.keys(metaConfig.variants).length;
197
+ display.blank();
198
+ display.success(`Multi-modal configuration "${metaName}" created with ${variantCount} variants`);
574
199
  }
575
200
 
576
201
  /**
577
202
  * Select a Codex model and reasoning effort interactively
578
203
  */
579
204
  private static async selectCodexModel(): Promise<{ model: string; reasoningEffort?: string }> {
580
- console.log(chalk.cyan("\nFetching available Codex models..."));
205
+ display.blank();
206
+ display.context("Fetching available Codex models...");
581
207
 
582
208
  try {
583
209
  const models = await listCodexModels();
584
210
 
585
211
  if (models.length === 0) {
586
- console.log(chalk.yellow("No models found. Using default."));
212
+ display.hint("No models found. Using default.");
587
213
  return { model: "gpt-5.1-codex-max" };
588
214
  }
589
215
 
590
- // Show models with details
591
- console.log(chalk.bold("\nAvailable Codex Models:"));
592
- for (const model of models) {
593
- console.log(chalk.gray(formatCodexModel(model)));
594
- }
595
- console.log("");
596
-
597
- // Select model
598
216
  const { model } = await inquirer.prompt([
599
217
  {
600
218
  type: "select",
601
219
  name: "model",
602
220
  message: "Select model:",
603
- choices: models.map((m) => ({
604
- name: m.isDefault ? `${m.displayName} (default)` : m.displayName,
605
- value: m.id,
606
- })),
221
+ choices: models.map((m) => {
222
+ const defaultTag = m.isDefault ? chalk.dim(" (default)") : "";
223
+ return {
224
+ name: `${m.id}${defaultTag} ${chalk.dim(m.description)}`,
225
+ value: m.id,
226
+ };
227
+ }),
228
+ theme: inquirerTheme,
607
229
  },
608
230
  ]);
609
231
 
@@ -626,12 +248,13 @@ export class ConfigurationManager {
626
248
  value: e,
627
249
  })),
628
250
  ],
251
+ theme: inquirerTheme,
629
252
  },
630
253
  ]);
631
254
 
632
255
  return { model, reasoningEffort };
633
256
  } catch (error) {
634
- console.log(chalk.yellow(`Could not fetch models: ${error}. Using default.`));
257
+ display.hint(`Could not fetch models: ${error}. Using default.`);
635
258
  return { model: "gpt-5.1-codex-max" };
636
259
  }
637
260
  }