@exagent/agent 0.3.4 → 0.3.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/cli.js CHANGED
@@ -79,6 +79,87 @@ function printError(message) {
79
79
  console.log(` ${pc.red("\u2717")} ${message}`);
80
80
  }
81
81
 
82
+ // src/llm-providers.ts
83
+ var LLM_PROVIDERS = [
84
+ {
85
+ id: "openai",
86
+ label: "OpenAI",
87
+ models: [
88
+ { id: "gpt-5.2", label: "GPT-5.2" },
89
+ { id: "gpt-5.2-pro", label: "GPT-5.2 Pro" },
90
+ { id: "gpt-5-mini", label: "GPT-5 Mini" },
91
+ { id: "gpt-5-nano", label: "GPT-5 Nano" },
92
+ { id: "gpt-4o", label: "GPT-4o" },
93
+ { id: "gpt-4o-mini", label: "GPT-4o Mini" }
94
+ ]
95
+ },
96
+ {
97
+ id: "anthropic",
98
+ label: "Anthropic",
99
+ models: [
100
+ { id: "claude-opus-4-6", label: "Claude Opus 4.6" },
101
+ { id: "claude-sonnet-4-6", label: "Claude Sonnet 4.6" },
102
+ { id: "claude-haiku-4-5", label: "Claude Haiku 4.5" }
103
+ ]
104
+ },
105
+ {
106
+ id: "google",
107
+ label: "Google",
108
+ models: [
109
+ { id: "gemini-3-pro", label: "Gemini 3 Pro" },
110
+ { id: "gemini-3-flash", label: "Gemini 3 Flash" },
111
+ { id: "gemini-2.5-pro", label: "Gemini 2.5 Pro" },
112
+ { id: "gemini-2.5-flash", label: "Gemini 2.5 Flash" },
113
+ { id: "gemini-2.5-flash-lite", label: "Gemini 2.5 Flash Lite" }
114
+ ]
115
+ },
116
+ {
117
+ id: "deepseek",
118
+ label: "DeepSeek",
119
+ models: [
120
+ { id: "deepseek-chat", label: "DeepSeek Chat" },
121
+ { id: "deepseek-reasoner", label: "DeepSeek Reasoner" }
122
+ ]
123
+ },
124
+ {
125
+ id: "mistral",
126
+ label: "Mistral",
127
+ models: [
128
+ { id: "mistral-large-latest", label: "Mistral Large" },
129
+ { id: "mistral-small-latest", label: "Mistral Small" }
130
+ ]
131
+ },
132
+ {
133
+ id: "groq",
134
+ label: "Groq",
135
+ models: [
136
+ { id: "llama-3.3-70b-versatile", label: "Llama 3.3 70B" },
137
+ { id: "llama-3.1-8b-instant", label: "Llama 3.1 8B" },
138
+ { id: "mixtral-8x7b-32768", label: "Mixtral 8x7B" }
139
+ ]
140
+ },
141
+ {
142
+ id: "together",
143
+ label: "Together",
144
+ models: [
145
+ { id: "meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo", label: "Llama 3.1 70B" },
146
+ { id: "meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo", label: "Llama 3.1 8B" }
147
+ ]
148
+ },
149
+ {
150
+ id: "ollama",
151
+ label: "Ollama (local)",
152
+ models: [
153
+ { id: "llama3.1", label: "Llama 3.1" },
154
+ { id: "mistral", label: "Mistral" },
155
+ { id: "custom", label: "Custom (type model name)" }
156
+ ]
157
+ }
158
+ ];
159
+ function getProvider(id) {
160
+ return LLM_PROVIDERS.find((p) => p.id === id);
161
+ }
162
+
82
163
  // src/setup.ts
83
164
  function expandHomeDir(path) {
84
165
  if (!path.startsWith("~/")) return path;
@@ -173,7 +254,6 @@ async function setupWallet(config) {
173
254
  printDone(`Wallet imported: ${pc.dim(address)}`);
174
255
  return privateKey;
175
256
  }
176
- var LLM_PROVIDERS = ["openai", "anthropic", "google", "deepseek", "mistral", "groq", "together", "ollama"];
177
257
  async function setupLlm(config) {
178
258
  if (isNonInteractive()) {
179
259
  const provider2 = process.env.EXAGENT_LLM_PROVIDER || config.llm?.provider;
@@ -186,7 +266,7 @@ async function setupLlm(config) {
186
266
  return { provider: provider2, model: model2, apiKey: apiKey2 };
187
267
  }
188
268
  const defaultProvider = config.llm?.provider;
189
- const providerOptions = LLM_PROVIDERS.map((p) => ({ value: p, label: p }));
269
+ const providerOptions = LLM_PROVIDERS.map((p) => ({ value: p.id, label: p.label }));
190
270
  const selected = await clack.select({
191
271
  message: "LLM provider:",
192
272
  options: providerOptions,
@@ -195,16 +275,15 @@ async function setupLlm(config) {
195
275
  if (clack.isCancel(selected)) cancelled();
196
276
  const provider = selected;
197
277
  const defaultModel = config.llm?.model;
198
- const entered = await clack.text({
278
+ const providerInfo = getProvider(provider);
279
+ const modelOptions = providerInfo ? providerInfo.models.map((m) => ({ value: m.id, label: m.label })) : [{ value: defaultModel || "gpt-4o", label: defaultModel || "gpt-4o" }];
280
+ const selectedModel = await clack.select({
199
281
  message: "LLM model:",
200
- placeholder: defaultModel || "gpt-4o",
201
- defaultValue: defaultModel || void 0,
202
- validate: (val) => {
203
- if (!val.trim()) return "Model name is required.";
204
- }
282
+ options: modelOptions,
283
+ initialValue: defaultModel || void 0
205
284
  });
206
- if (clack.isCancel(entered)) cancelled();
207
- const model = entered;
285
+ if (clack.isCancel(selectedModel)) cancelled();
286
+ const model = selectedModel;
208
287
  const apiKey = await clack.password({
209
288
  message: "LLM API key:",
210
289
  validate: (val) => validateLlmKeyFormat(provider, val)
@@ -344,7 +423,7 @@ async function ensureLocalSetup(configPath) {
344
423
  // src/cli.ts
345
424
  import * as clack2 from "@clack/prompts";
346
425
  var program = new Command();
347
- program.name("exagent").description("Exagent \u2014 LLM trading agent runtime").version("0.3.4");
426
+ program.name("exagent").description("Exagent \u2014 LLM trading agent runtime").version("0.3.5");
348
427
  program.command("init").description("Create a sample agent configuration file").option("--agent-id <id>", "Agent ID (from dashboard)", "my-agent").option("--api-url <url>", "API server URL", "http://localhost:3002").option("--config <path>", "Config file path", "agent-config.json").action((opts) => {
349
428
  printBanner();
350
429
  writeSampleConfig(opts.agentId, opts.apiUrl, opts.config);
@@ -420,7 +499,6 @@ program.command("status").description("Check agent status").option("--config <pa
420
499
  process.exit(1);
421
500
  }
422
501
  });
423
- var LLM_PROVIDERS2 = ["openai", "anthropic", "google", "deepseek", "mistral", "groq", "together", "ollama"];
424
502
  program.command("config").description("Change LLM provider, model, or API key").option("--config <path>", "Config file path", "agent-config.json").action(async (opts) => {
425
503
  try {
426
504
  printBanner();
@@ -459,7 +537,7 @@ program.command("config").description("Change LLM provider, model, or API key").
459
537
  if (action === "all") {
460
538
  const selectedProvider = await clack2.select({
461
539
  message: "LLM provider:",
462
- options: LLM_PROVIDERS2.map((p) => ({ value: p, label: p })),
540
+ options: LLM_PROVIDERS.map((p) => ({ value: p.id, label: p.label })),
463
541
  initialValue: currentProvider || void 0
464
542
  });
465
543
  if (clack2.isCancel(selectedProvider)) {
@@ -467,18 +545,18 @@ program.command("config").description("Change LLM provider, model, or API key").
467
545
  process.exit(0);
468
546
  }
469
547
  newProvider = selectedProvider;
470
- const enteredModel = await clack2.text({
548
+ const provider = getProvider(newProvider);
549
+ const modelOptions = provider ? provider.models.map((m) => ({ value: m.id, label: m.label })) : [{ value: currentModel, label: currentModel }];
550
+ const selectedModel = await clack2.select({
471
551
  message: "LLM model:",
472
- defaultValue: currentModel,
473
- validate: (val) => {
474
- if (!val?.trim()) return "Model name is required.";
475
- }
552
+ options: modelOptions,
553
+ initialValue: currentModel || void 0
476
554
  });
477
- if (clack2.isCancel(enteredModel)) {
555
+ if (clack2.isCancel(selectedModel)) {
478
556
  clack2.cancel("Cancelled.");
479
557
  process.exit(0);
480
558
  }
481
- newModel = enteredModel;
559
+ newModel = selectedModel;
482
560
  }
483
561
  const newKey = await clack2.password({
484
562
  message: "New LLM API key:",
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@exagent/agent",
3
- "version": "0.3.4",
3
+ "version": "0.3.5",
4
4
  "type": "module",
5
5
  "main": "dist/index.js",
6
6
  "types": "dist/index.d.ts",
package/src/cli.ts CHANGED
@@ -12,7 +12,7 @@ const program = new Command();
12
12
  program
13
13
  .name('exagent')
14
14
  .description('Exagent — LLM trading agent runtime')
15
- .version('0.3.4');
15
+ .version('0.3.5');
16
16
 
17
17
  program
18
18
  .command('init')
@@ -120,7 +120,7 @@ program
120
120
  }
121
121
  });
122
122
 
123
- const LLM_PROVIDERS = ['openai', 'anthropic', 'google', 'deepseek', 'mistral', 'groq', 'together', 'ollama'] as const;
123
+ import { LLM_PROVIDERS, getProvider } from './llm-providers.js';
124
124
 
125
125
  program
126
126
  .command('config')
@@ -170,13 +170,13 @@ program
170
170
  process.exit(0);
171
171
  }
172
172
 
173
- let newProvider = currentProvider;
174
- let newModel = currentModel;
173
+ let newProvider: string = currentProvider;
174
+ let newModel: string = currentModel;
175
175
 
176
176
  if (action === 'all') {
177
177
  const selectedProvider = await clack.select({
178
178
  message: 'LLM provider:',
179
- options: LLM_PROVIDERS.map(p => ({ value: p, label: p })),
179
+ options: LLM_PROVIDERS.map(p => ({ value: p.id, label: p.label })),
180
180
  initialValue: currentProvider || undefined,
181
181
  });
182
182
  if (clack.isCancel(selectedProvider)) {
@@ -185,18 +185,21 @@ program
185
185
  }
186
186
  newProvider = selectedProvider;
187
187
 
188
- const enteredModel = await clack.text({
188
+ const provider = getProvider(newProvider);
189
+ const modelOptions = provider
190
+ ? provider.models.map(m => ({ value: m.id, label: m.label }))
191
+ : [{ value: currentModel, label: currentModel }];
192
+
193
+ const selectedModel = await clack.select({
189
194
  message: 'LLM model:',
190
- defaultValue: currentModel,
191
- validate: (val) => {
192
- if (!val?.trim()) return 'Model name is required.';
193
- },
195
+ options: modelOptions,
196
+ initialValue: currentModel || undefined,
194
197
  });
195
- if (clack.isCancel(enteredModel)) {
198
+ if (clack.isCancel(selectedModel)) {
196
199
  clack.cancel('Cancelled.');
197
200
  process.exit(0);
198
201
  }
199
- newModel = enteredModel;
202
+ newModel = selectedModel;
200
203
  }
201
204
 
202
205
  const newKey = await clack.password({
package/src/llm-providers.ts ADDED
@@ -0,0 +1,100 @@
1
+ /**
2
+ * LLM provider and model registry.
3
+ * Keep in sync with apps/web/src/lib/llm-providers.ts.
4
+ */
5
+
6
+ export interface LlmModel {
7
+ id: string;
8
+ label: string;
9
+ }
10
+
11
+ export interface LlmProvider {
12
+ id: string;
13
+ label: string;
14
+ models: LlmModel[];
15
+ }
16
+
17
+ export const LLM_PROVIDERS: LlmProvider[] = [
18
+ {
19
+ id: 'openai',
20
+ label: 'OpenAI',
21
+ models: [
22
+ { id: 'gpt-5.2', label: 'GPT-5.2' },
23
+ { id: 'gpt-5.2-pro', label: 'GPT-5.2 Pro' },
24
+ { id: 'gpt-5-mini', label: 'GPT-5 Mini' },
25
+ { id: 'gpt-5-nano', label: 'GPT-5 Nano' },
26
+ { id: 'gpt-4o', label: 'GPT-4o' },
27
+ { id: 'gpt-4o-mini', label: 'GPT-4o Mini' },
28
+ ],
29
+ },
30
+ {
31
+ id: 'anthropic',
32
+ label: 'Anthropic',
33
+ models: [
34
+ { id: 'claude-opus-4-6', label: 'Claude Opus 4.6' },
35
+ { id: 'claude-sonnet-4-6', label: 'Claude Sonnet 4.6' },
36
+ { id: 'claude-haiku-4-5', label: 'Claude Haiku 4.5' },
37
+ ],
38
+ },
39
+ {
40
+ id: 'google',
41
+ label: 'Google',
42
+ models: [
43
+ { id: 'gemini-3-pro', label: 'Gemini 3 Pro' },
44
+ { id: 'gemini-3-flash', label: 'Gemini 3 Flash' },
45
+ { id: 'gemini-2.5-pro', label: 'Gemini 2.5 Pro' },
46
+ { id: 'gemini-2.5-flash', label: 'Gemini 2.5 Flash' },
47
+ { id: 'gemini-2.5-flash-lite', label: 'Gemini 2.5 Flash Lite' },
48
+ ],
49
+ },
50
+ {
51
+ id: 'deepseek',
52
+ label: 'DeepSeek',
53
+ models: [
54
+ { id: 'deepseek-chat', label: 'DeepSeek Chat' },
55
+ { id: 'deepseek-reasoner', label: 'DeepSeek Reasoner' },
56
+ ],
57
+ },
58
+ {
59
+ id: 'mistral',
60
+ label: 'Mistral',
61
+ models: [
62
+ { id: 'mistral-large-latest', label: 'Mistral Large' },
63
+ { id: 'mistral-small-latest', label: 'Mistral Small' },
64
+ ],
65
+ },
66
+ {
67
+ id: 'groq',
68
+ label: 'Groq',
69
+ models: [
70
+ { id: 'llama-3.3-70b-versatile', label: 'Llama 3.3 70B' },
71
+ { id: 'llama-3.1-8b-instant', label: 'Llama 3.1 8B' },
72
+ { id: 'mixtral-8x7b-32768', label: 'Mixtral 8x7B' },
73
+ ],
74
+ },
75
+ {
76
+ id: 'together',
77
+ label: 'Together',
78
+ models: [
79
+ { id: 'meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo', label: 'Llama 3.1 70B' },
80
+ { id: 'meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo', label: 'Llama 3.1 8B' },
81
+ ],
82
+ },
83
+ {
84
+ id: 'ollama',
85
+ label: 'Ollama (local)',
86
+ models: [
87
+ { id: 'llama3.1', label: 'Llama 3.1' },
88
+ { id: 'mistral', label: 'Mistral' },
89
+ { id: 'custom', label: 'Custom (type model name)' },
90
+ ],
91
+ },
92
+ ];
93
+
94
+ export function getProvider(id: string): LlmProvider | undefined {
95
+ return LLM_PROVIDERS.find((p) => p.id === id);
96
+ }
97
+
98
+ export function getProviderIds(): string[] {
99
+ return LLM_PROVIDERS.map((p) => p.id);
100
+ }
package/src/setup.ts CHANGED
@@ -146,7 +146,7 @@ async function setupWallet(config: RuntimeConfigFile): Promise<string> {
146
146
  // Step 3: LLM
147
147
  // ---------------------------------------------------------------------------
148
148
 
149
- const LLM_PROVIDERS = ['openai', 'anthropic', 'google', 'deepseek', 'mistral', 'groq', 'together', 'ollama'] as const;
149
+ import { LLM_PROVIDERS, getProvider } from './llm-providers.js';
150
150
 
151
151
  async function setupLlm(
152
152
  config: RuntimeConfigFile,
@@ -168,7 +168,7 @@ async function setupLlm(
168
168
 
169
169
  // Provider — use config as default selection if available
170
170
  const defaultProvider = config.llm?.provider;
171
- const providerOptions = LLM_PROVIDERS.map(p => ({ value: p, label: p }));
171
+ const providerOptions = LLM_PROVIDERS.map(p => ({ value: p.id, label: p.label }));
172
172
  const selected = await clack.select({
173
173
  message: 'LLM provider:',
174
174
  options: providerOptions,
@@ -177,18 +177,19 @@ async function setupLlm(
177
177
  if (clack.isCancel(selected)) cancelled();
178
178
  const provider = selected;
179
179
 
180
- // Model — use config as default/placeholder if available
180
+ // Model — show available models for the selected provider
181
181
  const defaultModel = config.llm?.model;
182
- const entered = await clack.text({
182
+ const providerInfo = getProvider(provider);
183
+ const modelOptions = providerInfo
184
+ ? providerInfo.models.map(m => ({ value: m.id, label: m.label }))
185
+ : [{ value: defaultModel || 'gpt-4o', label: defaultModel || 'gpt-4o' }];
186
+ const selectedModel = await clack.select({
183
187
  message: 'LLM model:',
184
- placeholder: defaultModel || 'gpt-4o',
185
- defaultValue: defaultModel || undefined,
186
- validate: (val) => {
187
- if (!val.trim()) return 'Model name is required.';
188
- },
188
+ options: modelOptions,
189
+ initialValue: defaultModel || undefined,
189
190
  });
190
- if (clack.isCancel(entered)) cancelled();
191
- const model = entered;
191
+ if (clack.isCancel(selectedModel)) cancelled();
192
+ const model = selectedModel;
192
193
 
193
194
  // API Key — always prompt, never from bootstrap
194
195
  const apiKey = await clack.password({