@terminusagents/agents 0.1.1 → 0.1.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +19 -8
- package/dist/cli/doctor.js +105 -3
- package/dist/cli/init.d.ts +1 -1
- package/dist/cli/init.js +115 -32
- package/dist/cli/run.js +14 -1
- package/dist/cli/status.js +47 -6
- package/dist/config/store.d.ts +2 -1
- package/dist/config/store.js +30 -5
- package/dist/index.js +1 -1
- package/dist/llm/provider.d.ts +32 -1
- package/dist/llm/provider.js +233 -16
- package/package.json +1 -1
package/README.md
CHANGED
|
@@ -4,7 +4,7 @@ Run a Terminus agent on your machine, receive jobs from the control plane, and e
|
|
|
4
4
|
|
|
5
5
|
This package supports:
|
|
6
6
|
- challenge-signature websocket auth
|
|
7
|
-
- Grok / Ollama / OpenAI-compatible providers
|
|
7
|
+
- Grok / OpenAI / Claude / Gemini / Ollama / OpenAI-compatible providers
|
|
8
8
|
- testnet/mainnet/local network profiles
|
|
9
9
|
- one-command diagnostics with `doctor`
|
|
10
10
|
|
|
@@ -58,7 +58,11 @@ npx terminus-agent init
|
|
|
58
58
|
|
|
59
59
|
```bash
|
|
60
60
|
export TERMINUS_WALLET_PRIVATE_KEY=0x...
|
|
61
|
-
|
|
61
|
+
# Use one provider key based on your llmProvider choice:
|
|
62
|
+
export TERMINUS_GROK_API_KEY=xai-... # grok
|
|
63
|
+
export TERMINUS_OPENAI_API_KEY=sk-... # openai
|
|
64
|
+
export TERMINUS_ANTHROPIC_API_KEY=sk-ant-... # anthropic (claude)
|
|
65
|
+
export TERMINUS_GOOGLE_API_KEY=AIza... # google (gemini)
|
|
62
66
|
```
|
|
63
67
|
|
|
64
68
|
4. Run diagnostics:
|
|
@@ -79,7 +83,7 @@ Use this for scripted onboarding:
|
|
|
79
83
|
|
|
80
84
|
```bash
|
|
81
85
|
export TERMINUS_WALLET_PRIVATE_KEY=0x...
|
|
82
|
-
export
|
|
86
|
+
export TERMINUS_OPENAI_API_KEY=sk-...
|
|
83
87
|
|
|
84
88
|
npx terminus-agent init \
|
|
85
89
|
--yes \
|
|
@@ -87,7 +91,8 @@ npx terminus-agent init \
|
|
|
87
91
|
--profile testnet \
|
|
88
92
|
--agent-type travel-planner \
|
|
89
93
|
--wallet 0x1234567890abcdef1234567890abcdef12345678 \
|
|
90
|
-
--llm-provider
|
|
94
|
+
--llm-provider openai \
|
|
95
|
+
--llm-model gpt-4o-mini
|
|
91
96
|
```
|
|
92
97
|
|
|
93
98
|
Then:
|
|
@@ -153,6 +158,10 @@ Example:
|
|
|
153
158
|
```
|
|
154
159
|
|
|
155
160
|
`"apiKey": "__ENV__"` means runtime key from `TERMINUS_GROK_API_KEY` or `XAI_API_KEY`.
|
|
161
|
+
For other cloud providers, runtime keys can also come from:
|
|
162
|
+
- OpenAI: `TERMINUS_OPENAI_API_KEY` or `OPENAI_API_KEY`
|
|
163
|
+
- Claude: `TERMINUS_ANTHROPIC_API_KEY` or `ANTHROPIC_API_KEY`
|
|
164
|
+
- Gemini: `TERMINUS_GOOGLE_API_KEY` or `GOOGLE_API_KEY` or `GEMINI_API_KEY`
|
|
156
165
|
|
|
157
166
|
## Troubleshooting
|
|
158
167
|
|
|
@@ -166,10 +175,12 @@ Example:
|
|
|
166
175
|
- verify private key signer address matches configured wallet
|
|
167
176
|
- rerun `npx terminus-agent init --force` if wallet changed
|
|
168
177
|
|
|
169
|
-
### `
|
|
170
|
-
- set
|
|
171
|
-
- `TERMINUS_GROK_API_KEY`
|
|
172
|
-
- `
|
|
178
|
+
### `Provider key missing`
|
|
179
|
+
- set the env key for your provider:
|
|
180
|
+
- Grok: `TERMINUS_GROK_API_KEY` or `XAI_API_KEY`
|
|
181
|
+
- OpenAI: `TERMINUS_OPENAI_API_KEY` or `OPENAI_API_KEY`
|
|
182
|
+
- Claude: `TERMINUS_ANTHROPIC_API_KEY` or `ANTHROPIC_API_KEY`
|
|
183
|
+
- Gemini: `TERMINUS_GOOGLE_API_KEY` or `GOOGLE_API_KEY` or `GEMINI_API_KEY`
|
|
173
184
|
- or store key directly during `init`
|
|
174
185
|
|
|
175
186
|
### `Ollama not reachable`
|
package/dist/cli/doctor.js
CHANGED
|
@@ -16,10 +16,25 @@ function isValidWallet(value) {
|
|
|
16
16
|
function isWsUrl(value) {
|
|
17
17
|
return value.startsWith('ws://') || value.startsWith('wss://');
|
|
18
18
|
}
|
|
19
|
-
function
|
|
19
|
+
function getRuntimeProviderKey(provider, storedApiKey) {
|
|
20
20
|
if (storedApiKey && storedApiKey !== '__ENV__')
|
|
21
21
|
return storedApiKey;
|
|
22
|
-
|
|
22
|
+
if (provider === 'grok') {
|
|
23
|
+
return process.env.TERMINUS_GROK_API_KEY?.trim() || process.env.XAI_API_KEY?.trim() || undefined;
|
|
24
|
+
}
|
|
25
|
+
if (provider === 'openai') {
|
|
26
|
+
return process.env.TERMINUS_OPENAI_API_KEY?.trim() || process.env.OPENAI_API_KEY?.trim() || undefined;
|
|
27
|
+
}
|
|
28
|
+
if (provider === 'anthropic') {
|
|
29
|
+
return process.env.TERMINUS_ANTHROPIC_API_KEY?.trim() || process.env.ANTHROPIC_API_KEY?.trim() || undefined;
|
|
30
|
+
}
|
|
31
|
+
if (provider === 'google') {
|
|
32
|
+
return (process.env.TERMINUS_GOOGLE_API_KEY?.trim()
|
|
33
|
+
|| process.env.GOOGLE_API_KEY?.trim()
|
|
34
|
+
|| process.env.GEMINI_API_KEY?.trim()
|
|
35
|
+
|| undefined);
|
|
36
|
+
}
|
|
37
|
+
return undefined;
|
|
23
38
|
}
|
|
24
39
|
async function checkWebSocketReachability(url, timeoutMs) {
|
|
25
40
|
return new Promise((resolve) => {
|
|
@@ -71,6 +86,69 @@ async function checkGrokApiKey(apiKey, fullCheck) {
|
|
|
71
86
|
return { ok: false, message: error.message };
|
|
72
87
|
}
|
|
73
88
|
}
|
|
89
|
+
async function checkOpenAiApiKey(apiKey, fullCheck) {
|
|
90
|
+
if (apiKey.length < 12) {
|
|
91
|
+
return { ok: false, message: 'Key format looks invalid (too short)' };
|
|
92
|
+
}
|
|
93
|
+
if (!fullCheck) {
|
|
94
|
+
return { ok: true, message: 'Key format looks valid' };
|
|
95
|
+
}
|
|
96
|
+
try {
|
|
97
|
+
const response = await fetch('https://api.openai.com/v1/models', {
|
|
98
|
+
headers: {
|
|
99
|
+
Authorization: `Bearer ${apiKey}`,
|
|
100
|
+
},
|
|
101
|
+
});
|
|
102
|
+
if (response.ok) {
|
|
103
|
+
return { ok: true, message: 'OpenAI API reachable and key accepted' };
|
|
104
|
+
}
|
|
105
|
+
return { ok: false, message: `OpenAI API returned ${response.status}` };
|
|
106
|
+
}
|
|
107
|
+
catch (error) {
|
|
108
|
+
return { ok: false, message: error.message };
|
|
109
|
+
}
|
|
110
|
+
}
|
|
111
|
+
async function checkAnthropicApiKey(apiKey, fullCheck) {
|
|
112
|
+
if (apiKey.length < 12) {
|
|
113
|
+
return { ok: false, message: 'Key format looks invalid (too short)' };
|
|
114
|
+
}
|
|
115
|
+
if (!fullCheck) {
|
|
116
|
+
return { ok: true, message: 'Key format looks valid' };
|
|
117
|
+
}
|
|
118
|
+
try {
|
|
119
|
+
const response = await fetch('https://api.anthropic.com/v1/models', {
|
|
120
|
+
headers: {
|
|
121
|
+
'x-api-key': apiKey,
|
|
122
|
+
'anthropic-version': '2023-06-01',
|
|
123
|
+
},
|
|
124
|
+
});
|
|
125
|
+
if (response.ok) {
|
|
126
|
+
return { ok: true, message: 'Anthropic API reachable and key accepted' };
|
|
127
|
+
}
|
|
128
|
+
return { ok: false, message: `Anthropic API returned ${response.status}` };
|
|
129
|
+
}
|
|
130
|
+
catch (error) {
|
|
131
|
+
return { ok: false, message: error.message };
|
|
132
|
+
}
|
|
133
|
+
}
|
|
134
|
+
async function checkGoogleApiKey(apiKey, fullCheck) {
|
|
135
|
+
if (apiKey.length < 12) {
|
|
136
|
+
return { ok: false, message: 'Key format looks invalid (too short)' };
|
|
137
|
+
}
|
|
138
|
+
if (!fullCheck) {
|
|
139
|
+
return { ok: true, message: 'Key format looks valid' };
|
|
140
|
+
}
|
|
141
|
+
try {
|
|
142
|
+
const response = await fetch(`https://generativelanguage.googleapis.com/v1beta/models?key=${apiKey}`);
|
|
143
|
+
if (response.ok) {
|
|
144
|
+
return { ok: true, message: 'Google Gemini API reachable and key accepted' };
|
|
145
|
+
}
|
|
146
|
+
return { ok: false, message: `Google Gemini API returned ${response.status}` };
|
|
147
|
+
}
|
|
148
|
+
catch (error) {
|
|
149
|
+
return { ok: false, message: error.message };
|
|
150
|
+
}
|
|
151
|
+
}
|
|
74
152
|
async function checkOpenAiCompatible(baseUrl, fullCheck) {
|
|
75
153
|
if (!baseUrl) {
|
|
76
154
|
return { ok: false, message: 'Base URL is missing' };
|
|
@@ -150,13 +228,37 @@ export async function doctorCommand(options = {}) {
|
|
|
150
228
|
}
|
|
151
229
|
const provider = config.llmProvider || 'grok';
|
|
152
230
|
if (provider === 'grok') {
|
|
153
|
-
const key =
|
|
231
|
+
const key = getRuntimeProviderKey(provider, config.apiKey);
|
|
154
232
|
const result = key
|
|
155
233
|
? await checkGrokApiKey(key, fullCheck)
|
|
156
234
|
: { ok: false, message: 'No Grok key found (config or env)' };
|
|
157
235
|
printCheck('Grok provider', result);
|
|
158
236
|
allOk = allOk && result.ok;
|
|
159
237
|
}
|
|
238
|
+
else if (provider === 'openai') {
|
|
239
|
+
const key = getRuntimeProviderKey(provider, config.apiKey);
|
|
240
|
+
const result = key
|
|
241
|
+
? await checkOpenAiApiKey(key, fullCheck)
|
|
242
|
+
: { ok: false, message: 'No OpenAI key found (config or env)' };
|
|
243
|
+
printCheck('OpenAI provider', result);
|
|
244
|
+
allOk = allOk && result.ok;
|
|
245
|
+
}
|
|
246
|
+
else if (provider === 'anthropic') {
|
|
247
|
+
const key = getRuntimeProviderKey(provider, config.apiKey);
|
|
248
|
+
const result = key
|
|
249
|
+
? await checkAnthropicApiKey(key, fullCheck)
|
|
250
|
+
: { ok: false, message: 'No Anthropic key found (config or env)' };
|
|
251
|
+
printCheck('Anthropic provider', result);
|
|
252
|
+
allOk = allOk && result.ok;
|
|
253
|
+
}
|
|
254
|
+
else if (provider === 'google') {
|
|
255
|
+
const key = getRuntimeProviderKey(provider, config.apiKey);
|
|
256
|
+
const result = key
|
|
257
|
+
? await checkGoogleApiKey(key, fullCheck)
|
|
258
|
+
: { ok: false, message: 'No Google key found (config or env)' };
|
|
259
|
+
printCheck('Google provider', result);
|
|
260
|
+
allOk = allOk && result.ok;
|
|
261
|
+
}
|
|
160
262
|
else if (provider === 'ollama') {
|
|
161
263
|
const baseUrl = config.llmBaseUrl || 'http://localhost:11434';
|
|
162
264
|
const available = await checkOllamaAvailable(baseUrl);
|
package/dist/cli/init.d.ts
CHANGED
|
@@ -1,5 +1,5 @@
|
|
|
1
1
|
import type { NetworkProfile } from '../config/store.js';
|
|
2
|
-
type LlmProvider = 'grok' | 'ollama' | 'openai-compatible';
|
|
2
|
+
type LlmProvider = 'grok' | 'openai' | 'anthropic' | 'google' | 'ollama' | 'openai-compatible';
|
|
3
3
|
export interface InitCommandOptions {
|
|
4
4
|
agentType?: string;
|
|
5
5
|
wallet?: string;
|
package/dist/cli/init.js
CHANGED
|
@@ -59,6 +59,21 @@ const LLM_CHOICES = [
|
|
|
59
59
|
value: 'grok',
|
|
60
60
|
short: 'Grok API',
|
|
61
61
|
},
|
|
62
|
+
{
|
|
63
|
+
name: `OpenAI ${chalk.gray('ChatGPT API')}`,
|
|
64
|
+
value: 'openai',
|
|
65
|
+
short: 'OpenAI',
|
|
66
|
+
},
|
|
67
|
+
{
|
|
68
|
+
name: `Claude ${chalk.gray('Anthropic API')}`,
|
|
69
|
+
value: 'anthropic',
|
|
70
|
+
short: 'Claude',
|
|
71
|
+
},
|
|
72
|
+
{
|
|
73
|
+
name: `Gemini ${chalk.gray('Google AI Studio API')}`,
|
|
74
|
+
value: 'google',
|
|
75
|
+
short: 'Gemini',
|
|
76
|
+
},
|
|
62
77
|
{
|
|
63
78
|
name: `Ollama ${chalk.gray('Local LLM (free)')}`,
|
|
64
79
|
value: 'ollama',
|
|
@@ -108,7 +123,12 @@ function normalizeControlPlaneUrl(input) {
|
|
|
108
123
|
function normalizeProvider(provider) {
|
|
109
124
|
if (!provider)
|
|
110
125
|
return undefined;
|
|
111
|
-
if (provider === 'grok'
|
|
126
|
+
if (provider === 'grok'
|
|
127
|
+
|| provider === 'openai'
|
|
128
|
+
|| provider === 'anthropic'
|
|
129
|
+
|| provider === 'google'
|
|
130
|
+
|| provider === 'ollama'
|
|
131
|
+
|| provider === 'openai-compatible') {
|
|
112
132
|
return provider;
|
|
113
133
|
}
|
|
114
134
|
return undefined;
|
|
@@ -121,8 +141,47 @@ function normalizeProfile(profile) {
|
|
|
121
141
|
}
|
|
122
142
|
return undefined;
|
|
123
143
|
}
|
|
124
|
-
function
|
|
125
|
-
|
|
144
|
+
function getRuntimeProviderKey(provider) {
|
|
145
|
+
if (provider === 'grok') {
|
|
146
|
+
return process.env.TERMINUS_GROK_API_KEY?.trim() || process.env.XAI_API_KEY?.trim() || undefined;
|
|
147
|
+
}
|
|
148
|
+
if (provider === 'openai') {
|
|
149
|
+
return process.env.TERMINUS_OPENAI_API_KEY?.trim() || process.env.OPENAI_API_KEY?.trim() || undefined;
|
|
150
|
+
}
|
|
151
|
+
if (provider === 'anthropic') {
|
|
152
|
+
return process.env.TERMINUS_ANTHROPIC_API_KEY?.trim() || process.env.ANTHROPIC_API_KEY?.trim() || undefined;
|
|
153
|
+
}
|
|
154
|
+
return (process.env.TERMINUS_GOOGLE_API_KEY?.trim()
|
|
155
|
+
|| process.env.GOOGLE_API_KEY?.trim()
|
|
156
|
+
|| process.env.GEMINI_API_KEY?.trim()
|
|
157
|
+
|| undefined);
|
|
158
|
+
}
|
|
159
|
+
function providerEnvHint(provider) {
|
|
160
|
+
if (provider === 'grok')
|
|
161
|
+
return 'TERMINUS_GROK_API_KEY or XAI_API_KEY';
|
|
162
|
+
if (provider === 'openai')
|
|
163
|
+
return 'TERMINUS_OPENAI_API_KEY or OPENAI_API_KEY';
|
|
164
|
+
if (provider === 'anthropic')
|
|
165
|
+
return 'TERMINUS_ANTHROPIC_API_KEY or ANTHROPIC_API_KEY';
|
|
166
|
+
return 'TERMINUS_GOOGLE_API_KEY or GOOGLE_API_KEY or GEMINI_API_KEY';
|
|
167
|
+
}
|
|
168
|
+
function providerLabel(provider) {
|
|
169
|
+
if (provider === 'grok')
|
|
170
|
+
return 'Grok';
|
|
171
|
+
if (provider === 'openai')
|
|
172
|
+
return 'OpenAI';
|
|
173
|
+
if (provider === 'anthropic')
|
|
174
|
+
return 'Claude';
|
|
175
|
+
return 'Gemini';
|
|
176
|
+
}
|
|
177
|
+
function providerDefaultModel(provider) {
|
|
178
|
+
if (provider === 'grok')
|
|
179
|
+
return 'grok-4-1-fast-non-reasoning';
|
|
180
|
+
if (provider === 'openai')
|
|
181
|
+
return 'gpt-4o-mini';
|
|
182
|
+
if (provider === 'anthropic')
|
|
183
|
+
return 'claude-3-5-haiku-latest';
|
|
184
|
+
return 'gemini-2.0-flash';
|
|
126
185
|
}
|
|
127
186
|
async function promptForAgentType(initialValue, nonInteractive) {
|
|
128
187
|
if (initialValue) {
|
|
@@ -194,48 +253,71 @@ async function promptForProvider(initialValue, nonInteractive) {
|
|
|
194
253
|
return llmProvider;
|
|
195
254
|
}
|
|
196
255
|
async function configureProvider(llmProvider, options, nonInteractive, ollamaModels) {
|
|
197
|
-
if (llmProvider === 'grok') {
|
|
198
|
-
const
|
|
256
|
+
if (llmProvider === 'grok' || llmProvider === 'openai' || llmProvider === 'anthropic' || llmProvider === 'google') {
|
|
257
|
+
const provider = llmProvider;
|
|
258
|
+
const runtimeProviderKey = getRuntimeProviderKey(provider);
|
|
259
|
+
const defaultModel = options.llmModel || providerDefaultModel(provider);
|
|
199
260
|
if (options.apiKey?.trim()) {
|
|
200
|
-
return {
|
|
261
|
+
return {
|
|
262
|
+
apiKey: options.apiKey.trim(),
|
|
263
|
+
llmModel: defaultModel,
|
|
264
|
+
};
|
|
201
265
|
}
|
|
202
266
|
if (nonInteractive) {
|
|
203
|
-
if (!
|
|
204
|
-
throw new Error(
|
|
267
|
+
if (!runtimeProviderKey) {
|
|
268
|
+
throw new Error(`${providerLabel(provider)} selected but no key provided. Set --apiKey or ${providerEnvHint(provider)}.`);
|
|
205
269
|
}
|
|
206
|
-
return {
|
|
270
|
+
return {
|
|
271
|
+
apiKey: '__ENV__',
|
|
272
|
+
llmModel: defaultModel,
|
|
273
|
+
};
|
|
207
274
|
}
|
|
208
|
-
|
|
209
|
-
|
|
275
|
+
let useRuntimeKey = false;
|
|
276
|
+
if (runtimeProviderKey) {
|
|
277
|
+
const answer = await inquirer.prompt([
|
|
210
278
|
{
|
|
211
279
|
type: 'confirm',
|
|
212
280
|
name: 'useRuntimeKey',
|
|
213
|
-
message:
|
|
281
|
+
message: `Use ${providerLabel(provider)} API key from runtime environment (recommended)?`,
|
|
214
282
|
default: true,
|
|
215
283
|
},
|
|
216
284
|
]);
|
|
217
|
-
|
|
218
|
-
return { apiKey: '__ENV__' };
|
|
219
|
-
}
|
|
285
|
+
useRuntimeKey = Boolean(answer.useRuntimeKey);
|
|
220
286
|
}
|
|
221
|
-
|
|
222
|
-
|
|
223
|
-
|
|
224
|
-
|
|
225
|
-
|
|
226
|
-
|
|
227
|
-
|
|
228
|
-
|
|
229
|
-
|
|
230
|
-
|
|
231
|
-
|
|
232
|
-
|
|
233
|
-
|
|
234
|
-
|
|
287
|
+
let apiKey = '__ENV__';
|
|
288
|
+
if (!useRuntimeKey) {
|
|
289
|
+
const { key } = await inquirer.prompt([
|
|
290
|
+
{
|
|
291
|
+
type: 'password',
|
|
292
|
+
name: 'key',
|
|
293
|
+
message: `${providerLabel(provider)} API key:`,
|
|
294
|
+
mask: '•',
|
|
295
|
+
validate: (input) => {
|
|
296
|
+
if (!input)
|
|
297
|
+
return 'API key is required';
|
|
298
|
+
if (provider === 'grok' && !input.startsWith('xai-')) {
|
|
299
|
+
return 'Grok API keys start with \"xai-\"';
|
|
300
|
+
}
|
|
301
|
+
if (input.length < 12)
|
|
302
|
+
return 'API key looks too short';
|
|
303
|
+
return true;
|
|
304
|
+
},
|
|
235
305
|
},
|
|
306
|
+
]);
|
|
307
|
+
apiKey = String(key).trim();
|
|
308
|
+
}
|
|
309
|
+
const { model } = await inquirer.prompt([
|
|
310
|
+
{
|
|
311
|
+
type: 'input',
|
|
312
|
+
name: 'model',
|
|
313
|
+
message: `${providerLabel(provider)} model name:`,
|
|
314
|
+
default: defaultModel,
|
|
236
315
|
},
|
|
237
316
|
]);
|
|
238
|
-
return {
|
|
317
|
+
return {
|
|
318
|
+
apiKey,
|
|
319
|
+
llmModel: String(model).trim(),
|
|
320
|
+
};
|
|
239
321
|
}
|
|
240
322
|
if (llmProvider === 'ollama') {
|
|
241
323
|
const defaultBaseUrl = options.llmBaseUrl || 'http://localhost:11434';
|
|
@@ -432,8 +514,9 @@ export async function initCommand(rawOptions = {}) {
|
|
|
432
514
|
console.log();
|
|
433
515
|
console.log(chalk.yellow('Important: set TERMINUS_WALLET_PRIVATE_KEY in your shell before running.'));
|
|
434
516
|
console.log(chalk.cyan(' export TERMINUS_WALLET_PRIVATE_KEY=0x...'));
|
|
435
|
-
if (llmProvider === 'grok'
|
|
436
|
-
|
|
517
|
+
if ((llmProvider === 'grok' || llmProvider === 'openai' || llmProvider === 'anthropic' || llmProvider === 'google')
|
|
518
|
+
&& providerConfig.apiKey === '__ENV__') {
|
|
519
|
+
console.log(chalk.yellow(`Important: ${providerLabel(llmProvider)} key will be read from ${providerEnvHint(llmProvider)}.`));
|
|
437
520
|
}
|
|
438
521
|
console.log(chalk.cyan('\nNext steps:'));
|
|
439
522
|
console.log(chalk.cyan(' npx terminus-agent doctor'));
|
package/dist/cli/run.js
CHANGED
|
@@ -26,7 +26,7 @@ function printStartupBanner(config) {
|
|
|
26
26
|
return;
|
|
27
27
|
const emoji = AGENT_EMOJIS[config.agentType] || '🤖';
|
|
28
28
|
const provider = config.llmProvider || 'grok';
|
|
29
|
-
const providerIcon = provider
|
|
29
|
+
const providerIcon = getProviderIcon(provider);
|
|
30
30
|
console.log();
|
|
31
31
|
console.log(chalk.cyan.bold('╔════════════════════════════════════════════════════════════╗'));
|
|
32
32
|
console.log(chalk.cyan.bold('║ 🚀 TERMINUS AGENT STARTING ║'));
|
|
@@ -41,6 +41,19 @@ function printStartupBanner(config) {
|
|
|
41
41
|
console.log(chalk.gray('\n────────────────────────────────────────────────────────────\n'));
|
|
42
42
|
console.log(chalk.yellow(' ⏳ Connecting to Control Plane...\n'));
|
|
43
43
|
}
|
|
44
|
+
function getProviderIcon(provider) {
|
|
45
|
+
if (provider === 'grok')
|
|
46
|
+
return '🌐';
|
|
47
|
+
if (provider === 'openai')
|
|
48
|
+
return '🧠';
|
|
49
|
+
if (provider === 'anthropic')
|
|
50
|
+
return '🧩';
|
|
51
|
+
if (provider === 'google')
|
|
52
|
+
return '🔷';
|
|
53
|
+
if (provider === 'ollama')
|
|
54
|
+
return '🦙';
|
|
55
|
+
return '🔧';
|
|
56
|
+
}
|
|
44
57
|
export async function runCommand() {
|
|
45
58
|
if (!configExists()) {
|
|
46
59
|
console.log();
|
package/dist/cli/status.js
CHANGED
|
@@ -55,10 +55,8 @@ export async function statusCommand() {
|
|
|
55
55
|
// LLM Provider Info
|
|
56
56
|
console.log(chalk.yellow('\n┌─ LLM Provider ─────────────────────────┐\n'));
|
|
57
57
|
const provider = config.llmProvider || 'grok';
|
|
58
|
-
const providerIcon = provider
|
|
59
|
-
const providerName = provider
|
|
60
|
-
: provider === 'ollama' ? 'Ollama (Local)'
|
|
61
|
-
: 'OpenAI-Compatible';
|
|
58
|
+
const providerIcon = getProviderIcon(provider);
|
|
59
|
+
const providerName = getProviderName(provider);
|
|
62
60
|
console.log(` ${chalk.gray('Provider:')} ${providerIcon} ${chalk.white(providerName)}`);
|
|
63
61
|
if (config.llmModel) {
|
|
64
62
|
console.log(` ${chalk.gray('Model:')} ${chalk.white(config.llmModel)}`);
|
|
@@ -71,8 +69,8 @@ export async function statusCommand() {
|
|
|
71
69
|
const ollamaOk = await checkOllamaAvailable(config.llmBaseUrl);
|
|
72
70
|
console.log(` ${chalk.gray('Status:')} ${ollamaOk ? chalk.green('✓ Connected') : chalk.red('✗ Not reachable')}`);
|
|
73
71
|
}
|
|
74
|
-
else if (provider
|
|
75
|
-
const hasRuntimeKey =
|
|
72
|
+
else if (provider !== 'openai-compatible') {
|
|
73
|
+
const hasRuntimeKey = hasRuntimeProviderKey(provider);
|
|
76
74
|
const hasStoredKey = Boolean(config.apiKey?.trim() && config.apiKey !== '__ENV__');
|
|
77
75
|
const source = hasRuntimeKey ? 'Runtime env' : hasStoredKey ? 'Config file' : 'Missing';
|
|
78
76
|
console.log(` ${chalk.gray('API Key:')} ${hasRuntimeKey || hasStoredKey ? chalk.green(`✓ ${source}`) : chalk.red('✗ Missing')}`);
|
|
@@ -102,3 +100,46 @@ export async function statusCommand() {
|
|
|
102
100
|
console.log(` ${chalk.cyan('npx terminus-agent doctor')} ${chalk.gray('Run readiness checks')}`);
|
|
103
101
|
console.log();
|
|
104
102
|
}
|
|
103
|
+
function getProviderIcon(provider) {
|
|
104
|
+
if (provider === 'grok')
|
|
105
|
+
return '🌐';
|
|
106
|
+
if (provider === 'openai')
|
|
107
|
+
return '🧠';
|
|
108
|
+
if (provider === 'anthropic')
|
|
109
|
+
return '🧩';
|
|
110
|
+
if (provider === 'google')
|
|
111
|
+
return '🔷';
|
|
112
|
+
if (provider === 'ollama')
|
|
113
|
+
return '🦙';
|
|
114
|
+
return '🔧';
|
|
115
|
+
}
|
|
116
|
+
function getProviderName(provider) {
|
|
117
|
+
if (provider === 'grok')
|
|
118
|
+
return 'Grok API (xAI Cloud)';
|
|
119
|
+
if (provider === 'openai')
|
|
120
|
+
return 'OpenAI API (ChatGPT)';
|
|
121
|
+
if (provider === 'anthropic')
|
|
122
|
+
return 'Claude API (Anthropic)';
|
|
123
|
+
if (provider === 'google')
|
|
124
|
+
return 'Gemini API (Google)';
|
|
125
|
+
if (provider === 'ollama')
|
|
126
|
+
return 'Ollama (Local)';
|
|
127
|
+
return 'OpenAI-Compatible';
|
|
128
|
+
}
|
|
129
|
+
function hasRuntimeProviderKey(provider) {
|
|
130
|
+
if (provider === 'grok') {
|
|
131
|
+
return Boolean(process.env.TERMINUS_GROK_API_KEY?.trim() || process.env.XAI_API_KEY?.trim());
|
|
132
|
+
}
|
|
133
|
+
if (provider === 'openai') {
|
|
134
|
+
return Boolean(process.env.TERMINUS_OPENAI_API_KEY?.trim() || process.env.OPENAI_API_KEY?.trim());
|
|
135
|
+
}
|
|
136
|
+
if (provider === 'anthropic') {
|
|
137
|
+
return Boolean(process.env.TERMINUS_ANTHROPIC_API_KEY?.trim() || process.env.ANTHROPIC_API_KEY?.trim());
|
|
138
|
+
}
|
|
139
|
+
if (provider === 'google') {
|
|
140
|
+
return Boolean(process.env.TERMINUS_GOOGLE_API_KEY?.trim()
|
|
141
|
+
|| process.env.GOOGLE_API_KEY?.trim()
|
|
142
|
+
|| process.env.GEMINI_API_KEY?.trim());
|
|
143
|
+
}
|
|
144
|
+
return false;
|
|
145
|
+
}
|
package/dist/config/store.d.ts
CHANGED
|
@@ -1,4 +1,5 @@
|
|
|
1
1
|
export type NetworkProfile = 'local' | 'testnet' | 'mainnet';
|
|
2
|
+
export type LlmProvider = 'grok' | 'openai' | 'anthropic' | 'google' | 'ollama' | 'openai-compatible';
|
|
2
3
|
export interface AgentConfig {
|
|
3
4
|
agentType: string;
|
|
4
5
|
wallet: string;
|
|
@@ -6,7 +7,7 @@ export interface AgentConfig {
|
|
|
6
7
|
apiKey: string;
|
|
7
8
|
controlPlaneUrl: string;
|
|
8
9
|
nodeId: string;
|
|
9
|
-
llmProvider?:
|
|
10
|
+
llmProvider?: LlmProvider;
|
|
10
11
|
llmBaseUrl?: string;
|
|
11
12
|
llmModel?: string;
|
|
12
13
|
networkProfile?: NetworkProfile;
|
package/dist/config/store.js
CHANGED
|
@@ -50,11 +50,11 @@ export function validateConfig(config) {
|
|
|
50
50
|
errors.push('nodeId is required');
|
|
51
51
|
}
|
|
52
52
|
const provider = config.llmProvider || 'grok';
|
|
53
|
-
if (provider
|
|
53
|
+
if (requiresApiKey(provider)) {
|
|
54
54
|
const hasStoredKey = typeof config.apiKey === 'string' && config.apiKey.length > 0 && config.apiKey !== '__ENV__';
|
|
55
|
-
const hasRuntimeKey =
|
|
55
|
+
const hasRuntimeKey = hasRuntimeProviderKey(provider);
|
|
56
56
|
if (!hasStoredKey && !hasRuntimeKey) {
|
|
57
|
-
errors.push(
|
|
57
|
+
errors.push(`${provider} provider requires apiKey in config or runtime env`);
|
|
58
58
|
}
|
|
59
59
|
}
|
|
60
60
|
if (provider === 'openai-compatible' && !config.llmBaseUrl) {
|
|
@@ -71,7 +71,7 @@ export function saveConfig(config) {
|
|
|
71
71
|
}
|
|
72
72
|
securePermissions(CONFIG_DIR, DIR_MODE);
|
|
73
73
|
const provider = config.llmProvider || 'grok';
|
|
74
|
-
const normalizedApiKey = provider
|
|
74
|
+
const normalizedApiKey = requiresApiKey(provider)
|
|
75
75
|
? (config.apiKey || '__ENV__')
|
|
76
76
|
: (config.apiKey || '');
|
|
77
77
|
const normalizedProfile = normalizeNetworkProfile(config.networkProfile);
|
|
@@ -105,7 +105,12 @@ function normalizeConfig(raw) {
|
|
|
105
105
|
return null;
|
|
106
106
|
const value = raw;
|
|
107
107
|
const llmProvider = value.llmProvider;
|
|
108
|
-
const provider = llmProvider === '
|
|
108
|
+
const provider = llmProvider === 'grok'
|
|
109
|
+
|| llmProvider === 'openai'
|
|
110
|
+
|| llmProvider === 'anthropic'
|
|
111
|
+
|| llmProvider === 'google'
|
|
112
|
+
|| llmProvider === 'ollama'
|
|
113
|
+
|| llmProvider === 'openai-compatible'
|
|
109
114
|
? llmProvider
|
|
110
115
|
: 'grok';
|
|
111
116
|
const networkProfile = normalizeNetworkProfile(value.networkProfile);
|
|
@@ -146,3 +151,23 @@ function normalizeNetworkProfile(value) {
|
|
|
146
151
|
return value;
|
|
147
152
|
return 'local';
|
|
148
153
|
}
|
|
154
|
+
function requiresApiKey(provider) {
|
|
155
|
+
return provider === 'grok' || provider === 'openai' || provider === 'anthropic' || provider === 'google';
|
|
156
|
+
}
|
|
157
|
+
function hasRuntimeProviderKey(provider) {
|
|
158
|
+
if (provider === 'grok') {
|
|
159
|
+
return Boolean(process.env.TERMINUS_GROK_API_KEY?.trim() || process.env.XAI_API_KEY?.trim());
|
|
160
|
+
}
|
|
161
|
+
if (provider === 'openai') {
|
|
162
|
+
return Boolean(process.env.TERMINUS_OPENAI_API_KEY?.trim() || process.env.OPENAI_API_KEY?.trim());
|
|
163
|
+
}
|
|
164
|
+
if (provider === 'anthropic') {
|
|
165
|
+
return Boolean(process.env.TERMINUS_ANTHROPIC_API_KEY?.trim() || process.env.ANTHROPIC_API_KEY?.trim());
|
|
166
|
+
}
|
|
167
|
+
if (provider === 'google') {
|
|
168
|
+
return Boolean(process.env.TERMINUS_GOOGLE_API_KEY?.trim()
|
|
169
|
+
|| process.env.GOOGLE_API_KEY?.trim()
|
|
170
|
+
|| process.env.GEMINI_API_KEY?.trim());
|
|
171
|
+
}
|
|
172
|
+
return false;
|
|
173
|
+
}
|
package/dist/index.js
CHANGED
|
@@ -25,7 +25,7 @@ program
|
|
|
25
25
|
.description('Initialize agent configuration')
|
|
26
26
|
.option('--agent-type <id>', 'Agent type id (example: travel-planner)')
|
|
27
27
|
.option('--wallet <address>', 'Wallet address for payouts')
|
|
28
|
-
.option('--llm-provider <provider>', 'grok | ollama | openai-compatible')
|
|
28
|
+
.option('--llm-provider <provider>', 'grok | openai | anthropic | google | ollama | openai-compatible')
|
|
29
29
|
.option('--api-key <key>', 'API key for provider')
|
|
30
30
|
.option('--llm-base-url <url>', 'Provider base URL')
|
|
31
31
|
.option('--llm-model <name>', 'LLM model name')
|
package/dist/llm/provider.d.ts
CHANGED
|
@@ -7,8 +7,9 @@ export interface LLMResponse {
|
|
|
7
7
|
model: string;
|
|
8
8
|
tokensUsed?: number;
|
|
9
9
|
}
|
|
10
|
+
export type LLMProviderType = 'grok' | 'openai' | 'anthropic' | 'google' | 'ollama' | 'openai-compatible';
|
|
10
11
|
export interface LLMProviderConfig {
|
|
11
|
-
provider:
|
|
12
|
+
provider: LLMProviderType;
|
|
12
13
|
apiKey?: string;
|
|
13
14
|
baseUrl?: string;
|
|
14
15
|
model?: string;
|
|
@@ -30,6 +31,36 @@ export declare class GrokProvider implements LLMProvider {
|
|
|
30
31
|
temperature?: number;
|
|
31
32
|
}): Promise<LLMResponse>;
|
|
32
33
|
}
|
|
34
|
+
export declare class OpenAIProvider implements LLMProvider {
|
|
35
|
+
name: string;
|
|
36
|
+
private apiKey;
|
|
37
|
+
private model;
|
|
38
|
+
constructor(apiKey: string, model?: string);
|
|
39
|
+
chat(messages: LLMMessage[], options?: {
|
|
40
|
+
maxTokens?: number;
|
|
41
|
+
temperature?: number;
|
|
42
|
+
}): Promise<LLMResponse>;
|
|
43
|
+
}
|
|
44
|
+
export declare class AnthropicProvider implements LLMProvider {
|
|
45
|
+
name: string;
|
|
46
|
+
private apiKey;
|
|
47
|
+
private model;
|
|
48
|
+
constructor(apiKey: string, model?: string);
|
|
49
|
+
chat(messages: LLMMessage[], options?: {
|
|
50
|
+
maxTokens?: number;
|
|
51
|
+
temperature?: number;
|
|
52
|
+
}): Promise<LLMResponse>;
|
|
53
|
+
}
|
|
54
|
+
export declare class GoogleProvider implements LLMProvider {
|
|
55
|
+
name: string;
|
|
56
|
+
private apiKey;
|
|
57
|
+
private model;
|
|
58
|
+
constructor(apiKey: string, model?: string);
|
|
59
|
+
chat(messages: LLMMessage[], options?: {
|
|
60
|
+
maxTokens?: number;
|
|
61
|
+
temperature?: number;
|
|
62
|
+
}): Promise<LLMResponse>;
|
|
63
|
+
}
|
|
33
64
|
export declare class OllamaProvider implements LLMProvider {
|
|
34
65
|
name: string;
|
|
35
66
|
private baseUrl;
|
package/dist/llm/provider.js
CHANGED
|
@@ -2,7 +2,8 @@
|
|
|
2
2
|
// TERMINUS AGENT - LLM Provider Interface
|
|
3
3
|
// =============================================================================
|
|
4
4
|
// Abstraction layer for different LLM backends.
|
|
5
|
-
// Supports: xAI Grok
|
|
5
|
+
// Supports: xAI Grok, OpenAI, Anthropic Claude, Google Gemini, Ollama,
|
|
6
|
+
// and OpenAI-compatible endpoints.
|
|
6
7
|
// =============================================================================
|
|
7
8
|
// =============================================================================
|
|
8
9
|
// xAI Grok Provider
|
|
@@ -20,7 +21,7 @@ export class GrokProvider {
|
|
|
20
21
|
method: 'POST',
|
|
21
22
|
headers: {
|
|
22
23
|
'Content-Type': 'application/json',
|
|
23
|
-
|
|
24
|
+
Authorization: `Bearer ${this.apiKey}`,
|
|
24
25
|
},
|
|
25
26
|
body: JSON.stringify({
|
|
26
27
|
model: this.model,
|
|
@@ -42,6 +43,139 @@ export class GrokProvider {
|
|
|
42
43
|
}
|
|
43
44
|
}
|
|
44
45
|
// =============================================================================
|
|
46
|
+
// OpenAI Provider
|
|
47
|
+
// =============================================================================
|
|
48
|
+
export class OpenAIProvider {
|
|
49
|
+
name = 'openai';
|
|
50
|
+
apiKey;
|
|
51
|
+
model;
|
|
52
|
+
constructor(apiKey, model = 'gpt-4o-mini') {
|
|
53
|
+
this.apiKey = apiKey;
|
|
54
|
+
this.model = model;
|
|
55
|
+
}
|
|
56
|
+
async chat(messages, options) {
|
|
57
|
+
const response = await fetch('https://api.openai.com/v1/chat/completions', {
|
|
58
|
+
method: 'POST',
|
|
59
|
+
headers: {
|
|
60
|
+
'Content-Type': 'application/json',
|
|
61
|
+
Authorization: `Bearer ${this.apiKey}`,
|
|
62
|
+
},
|
|
63
|
+
body: JSON.stringify({
|
|
64
|
+
model: this.model,
|
|
65
|
+
messages,
|
|
66
|
+
max_tokens: options?.maxTokens ?? 1024,
|
|
67
|
+
temperature: options?.temperature ?? 0.7,
|
|
68
|
+
}),
|
|
69
|
+
});
|
|
70
|
+
if (!response.ok) {
|
|
71
|
+
const error = await response.text();
|
|
72
|
+
throw new Error(`OpenAI API error: ${response.status} - ${error}`);
|
|
73
|
+
}
|
|
74
|
+
const data = await response.json();
|
|
75
|
+
return {
|
|
76
|
+
content: extractOpenAIContent(data.choices[0]?.message?.content),
|
|
77
|
+
model: this.model,
|
|
78
|
+
tokensUsed: data.usage?.total_tokens,
|
|
79
|
+
};
|
|
80
|
+
}
|
|
81
|
+
}
|
|
82
|
+
// =============================================================================
|
|
83
|
+
// Anthropic Claude Provider
|
|
84
|
+
// =============================================================================
|
|
85
|
+
export class AnthropicProvider {
    name = 'anthropic';
    apiKey;
    model;
    /**
     * @param {string} apiKey - Anthropic API key (sent via the x-api-key header).
     * @param {string} [model='claude-3-5-haiku-latest'] - Messages API model id.
     */
    constructor(apiKey, model = 'claude-3-5-haiku-latest') {
        this.apiKey = apiKey;
        this.model = model;
    }
    /**
     * Send a chat request to the Anthropic Messages API. System messages are
     * lifted out of `messages` into the top-level `system` field, as the API
     * requires.
     * @param {Array<{role: string, content: string}>} messages
     * @param {{maxTokens?: number, temperature?: number}} [options]
     * @returns {Promise<{content: string, model: string, tokensUsed: number}>}
     * @throws {Error} When the API responds with a non-2xx status.
     */
    async chat(messages, options) {
        const normalized = toAnthropicMessages(messages);
        const response = await fetch('https://api.anthropic.com/v1/messages', {
            method: 'POST',
            headers: {
                'Content-Type': 'application/json',
                'x-api-key': this.apiKey,
                'anthropic-version': '2023-06-01',
            },
            body: JSON.stringify({
                model: this.model,
                max_tokens: options?.maxTokens ?? 1024,
                temperature: options?.temperature ?? 0.7,
                system: normalized.systemPrompt || undefined,
                messages: normalized.messages,
            }),
        });
        if (!response.ok) {
            const error = await response.text();
            throw new Error(`Anthropic API error: ${response.status} - ${error}`);
        }
        const data = await response.json();
        // Guard against unexpected payloads: a missing/non-array `content`
        // should yield an empty string rather than a TypeError on .filter().
        const blocks = Array.isArray(data.content) ? data.content : [];
        const content = blocks
            .filter((part) => part.type === 'text' && typeof part.text === 'string')
            .map((part) => part.text)
            .join('\n')
            .trim();
        const inputTokens = data.usage?.input_tokens ?? 0;
        const outputTokens = data.usage?.output_tokens ?? 0;
        return {
            content,
            model: this.model,
            tokensUsed: inputTokens + outputTokens,
        };
    }
}
|
|
129
|
+
// =============================================================================
|
|
130
|
+
// Google Gemini Provider
|
|
131
|
+
// =============================================================================
|
|
132
|
+
export class GoogleProvider {
    name = 'google';
    apiKey;
    model;
    /**
     * @param {string} apiKey - Google AI Studio API key.
     * @param {string} [model='gemini-2.0-flash'] - Gemini model id.
     */
    constructor(apiKey, model = 'gemini-2.0-flash') {
        this.apiKey = apiKey;
        this.model = model;
    }
    /**
     * Send a chat request to the Gemini generateContent endpoint. System
     * messages are lifted into `systemInstruction`; remaining messages are
     * mapped to the user/model roles Gemini expects.
     * @param {Array<{role: string, content: string}>} messages
     * @param {{maxTokens?: number, temperature?: number}} [options]
     * @returns {Promise<{content: string, model: string, tokensUsed?: number}>}
     * @throws {Error} When the API responds with a non-2xx status.
     */
    async chat(messages, options) {
        const normalized = toGoogleMessages(messages);
        const encodedModel = encodeURIComponent(this.model);
        const payload = {
            contents: normalized.messages,
            generationConfig: {
                maxOutputTokens: options?.maxTokens ?? 1024,
                temperature: options?.temperature ?? 0.7,
            },
        };
        if (normalized.systemPrompt) {
            payload.systemInstruction = {
                parts: [{ text: normalized.systemPrompt }],
            };
        }
        // Send the API key via the x-goog-api-key header rather than a
        // `?key=` query parameter so the secret never appears in URLs
        // (proxies and servers routinely log full request URLs).
        const response = await fetch(`https://generativelanguage.googleapis.com/v1beta/models/${encodedModel}:generateContent`, {
            method: 'POST',
            headers: {
                'Content-Type': 'application/json',
                'x-goog-api-key': this.apiKey,
            },
            body: JSON.stringify(payload),
        });
        if (!response.ok) {
            const error = await response.text();
            throw new Error(`Google Gemini API error: ${response.status} - ${error}`);
        }
        const data = await response.json();
        const content = data.candidates?.[0]?.content?.parts
            ?.map((part) => part.text || '')
            .join('\n')
            .trim() || '';
        return {
            content,
            model: this.model,
            tokensUsed: data.usageMetadata?.totalTokenCount,
        };
    }
}
|
|
178
|
+
// =============================================================================
|
|
45
179
|
// Ollama Provider (Local LLM)
|
|
46
180
|
// =============================================================================
|
|
47
181
|
export class OllamaProvider {
|
|
@@ -49,11 +183,10 @@ export class OllamaProvider {
|
|
|
49
183
|
baseUrl;
|
|
50
184
|
model;
|
|
51
185
|
constructor(baseUrl = 'http://localhost:11434', model = 'llama3') {
|
|
52
|
-
this.baseUrl = baseUrl.replace(/\/$/, '');
|
|
186
|
+
this.baseUrl = baseUrl.replace(/\/$/, '');
|
|
53
187
|
this.model = model;
|
|
54
188
|
}
|
|
55
189
|
async chat(messages, options) {
|
|
56
|
-
// Ollama uses /api/chat endpoint
|
|
57
190
|
const response = await fetch(`${this.baseUrl}/api/chat`, {
|
|
58
191
|
method: 'POST',
|
|
59
192
|
headers: {
|
|
@@ -129,14 +262,34 @@ export class OpenAICompatibleProvider {
|
|
|
129
262
|
// =============================================================================
|
|
130
263
|
export function createLLMProvider(config) {
|
|
131
264
|
switch (config.provider) {
|
|
132
|
-
case 'grok':
|
|
133
|
-
|
|
134
|
-
|
|
135
|
-
|
|
136
|
-
|
|
137
|
-
|
|
138
|
-
|
|
265
|
+
case 'grok': {
|
|
266
|
+
const apiKey = resolveProviderApiKey('grok', config.apiKey);
|
|
267
|
+
if (!apiKey) {
|
|
268
|
+
throw new Error('Grok provider requires a key in config or TERMINUS_GROK_API_KEY/XAI_API_KEY env');
|
|
269
|
+
}
|
|
270
|
+
return new GrokProvider(apiKey, config.model);
|
|
271
|
+
}
|
|
272
|
+
case 'openai': {
|
|
273
|
+
const apiKey = resolveProviderApiKey('openai', config.apiKey);
|
|
274
|
+
if (!apiKey) {
|
|
275
|
+
throw new Error('OpenAI provider requires a key in config or TERMINUS_OPENAI_API_KEY/OPENAI_API_KEY env');
|
|
139
276
|
}
|
|
277
|
+
return new OpenAIProvider(apiKey, config.model || 'gpt-4o-mini');
|
|
278
|
+
}
|
|
279
|
+
case 'anthropic': {
|
|
280
|
+
const apiKey = resolveProviderApiKey('anthropic', config.apiKey);
|
|
281
|
+
if (!apiKey) {
|
|
282
|
+
throw new Error('Anthropic provider requires a key in config or TERMINUS_ANTHROPIC_API_KEY/ANTHROPIC_API_KEY env');
|
|
283
|
+
}
|
|
284
|
+
return new AnthropicProvider(apiKey, config.model || 'claude-3-5-haiku-latest');
|
|
285
|
+
}
|
|
286
|
+
case 'google': {
|
|
287
|
+
const apiKey = resolveProviderApiKey('google', config.apiKey);
|
|
288
|
+
if (!apiKey) {
|
|
289
|
+
throw new Error('Google provider requires a key in config or TERMINUS_GOOGLE_API_KEY/GOOGLE_API_KEY/GEMINI_API_KEY env');
|
|
290
|
+
}
|
|
291
|
+
return new GoogleProvider(apiKey, config.model || 'gemini-2.0-flash');
|
|
292
|
+
}
|
|
140
293
|
case 'ollama':
|
|
141
294
|
return new OllamaProvider(config.baseUrl || 'http://localhost:11434', config.model || 'llama3');
|
|
142
295
|
case 'openai-compatible':
|
|
@@ -149,7 +302,7 @@ export function createLLMProvider(config) {
|
|
|
149
302
|
}
|
|
150
303
|
}
|
|
151
304
|
// =============================================================================
|
|
152
|
-
//
|
|
305
|
+
// Provider helpers
|
|
153
306
|
// =============================================================================
|
|
154
307
|
export async function checkOllamaAvailable(baseUrl = 'http://localhost:11434') {
|
|
155
308
|
try {
|
|
@@ -166,16 +319,80 @@ export async function listOllamaModels(baseUrl = 'http://localhost:11434') {
|
|
|
166
319
|
if (!response.ok)
|
|
167
320
|
return [];
|
|
168
321
|
const data = await response.json();
|
|
169
|
-
return data.models?.map(m => m.name) || [];
|
|
322
|
+
return data.models?.map((m) => m.name) || [];
|
|
170
323
|
}
|
|
171
324
|
catch {
|
|
172
325
|
return [];
|
|
173
326
|
}
|
|
174
327
|
}
|
|
175
|
-
function
|
|
328
|
+
/**
 * Resolve the API key for an LLM provider. An explicit config value wins;
 * the sentinel '__ENV__' (or a blank value) forces an environment lookup.
 * Google's variables double as the fallback for unrecognized providers.
 * @param {string} provider - 'grok' | 'openai' | 'anthropic' | 'google' | other
 * @param {string} [configApiKey] - Key stored in config, if any.
 * @returns {string | undefined} Trimmed key, or undefined when none is set.
 */
function resolveProviderApiKey(provider, configApiKey) {
    const explicit = configApiKey?.trim();
    if (explicit && explicit !== '__ENV__')
        return explicit;
    const envNamesByProvider = {
        grok: ['TERMINUS_GROK_API_KEY', 'XAI_API_KEY'],
        openai: ['TERMINUS_OPENAI_API_KEY', 'OPENAI_API_KEY'],
        anthropic: ['TERMINUS_ANTHROPIC_API_KEY', 'ANTHROPIC_API_KEY'],
    };
    // Object.hasOwn keeps prototype properties from masquerading as providers.
    const candidates = Object.hasOwn(envNamesByProvider, provider)
        ? envNamesByProvider[provider]
        : ['TERMINUS_GOOGLE_API_KEY', 'GOOGLE_API_KEY', 'GEMINI_API_KEY'];
    for (const envName of candidates) {
        const value = process.env[envName]?.trim();
        if (value)
            return value;
    }
    return undefined;
}
|
|
346
|
+
/**
 * Normalize an OpenAI message `content` field to a plain string.
 * Strings pass through untouched; multi-part arrays keep only text parts,
 * joined by newlines with surrounding whitespace trimmed; nullish/empty
 * values become ''.
 * @param {string | Array<{type: string, text?: unknown}> | null | undefined} content
 * @returns {string}
 */
function extractOpenAIContent(content) {
    if (!content)
        return '';
    if (typeof content === 'string')
        return content;
    const textChunks = [];
    for (const part of content) {
        if (part.type === 'text' && typeof part.text === 'string') {
            textChunks.push(part.text);
        }
    }
    return textChunks.join('\n').trim();
}
|
|
357
|
+
/**
 * Convert provider-neutral chat messages into the Anthropic Messages shape.
 * All system messages are merged (double-newline separated, trimmed) into a
 * single system prompt; the rest keep their role/content pairs. An empty or
 * all-system conversation gets a stub 'Continue.' user turn so the request
 * body is never empty.
 * @param {Array<{role: string, content: string}>} messages
 * @returns {{systemPrompt: string, messages: Array<{role: string, content: string}>}}
 */
function toAnthropicMessages(messages) {
    const systemPrompt = messages
        .filter((message) => message.role === 'system')
        .map((message) => message.content)
        .join('\n\n')
        .trim();
    const turns = messages
        .filter((message) => message.role !== 'system')
        .map((message) => ({ role: message.role, content: message.content }));
    if (turns.length === 0) {
        turns.push({ role: 'user', content: 'Continue.' });
    }
    return { systemPrompt, messages: turns };
}
|
|
378
|
+
/**
 * Convert provider-neutral chat messages into the Gemini request shape.
 * System messages are merged (double-newline separated, trimmed) into one
 * system prompt; every other message becomes a `contents` entry whose role
 * is 'model' for assistant turns and 'user' otherwise, with a single text
 * part. An empty or all-system conversation gets a stub 'Continue.' user
 * turn so the request body is never empty.
 * @param {Array<{role: string, content: string}>} messages
 * @returns {{systemPrompt: string, messages: Array<{role: string, parts: Array<{text: string}>}>}}
 */
function toGoogleMessages(messages) {
    const systemPrompt = messages
        .filter((message) => message.role === 'system')
        .map((message) => message.content)
        .join('\n\n')
        .trim();
    const contents = messages
        .filter((message) => message.role !== 'system')
        .map((message) => ({
            role: message.role === 'assistant' ? 'model' : 'user',
            parts: [{ text: message.content }],
        }));
    if (contents.length === 0) {
        contents.push({ role: 'user', parts: [{ text: 'Continue.' }] });
    }
    return { systemPrompt, messages: contents };
}
|