@exagent/agent 0.3.4 → 0.3.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/{chunk-WTECTX2Z.js → chunk-ZRAOPQQW.js} +208 -147
- package/dist/cli.js +57 -37
- package/dist/index.js +1 -1
- package/package.json +18 -18
- package/src/cli.ts +32 -23
- package/src/config.ts +6 -3
- package/src/llm/anthropic.ts +3 -3
- package/src/llm/deepseek.ts +3 -3
- package/src/llm/google.ts +3 -3
- package/src/llm/groq.ts +3 -3
- package/src/llm/mistral.ts +3 -3
- package/src/llm/ollama.ts +3 -3
- package/src/llm/openai.ts +46 -3
- package/src/llm/together.ts +3 -3
- package/src/llm-providers.ts +8 -0
- package/src/prediction/client.ts +11 -4
- package/src/runtime.ts +3 -3
- package/src/setup.ts +29 -20
- package/src/strategy/loader.ts +136 -62
- package/src/strategy/templates.ts +0 -51
- package/test/strategy-loader.test.ts +150 -0
- package/.turbo/turbo-build.log +0 -17
- package/test-bridge-arb-to-base.mjs +0 -223
- package/test-funded-check.mjs +0 -79
- package/test-funded-phase19.mjs +0 -933
- package/test-hl-deposit-recover.mjs +0 -281
- package/test-hl-withdraw.mjs +0 -372
- package/test-live-signing.mjs +0 -374
- package/test-phase7.mjs +0 -416
- package/test-recover-arb.mjs +0 -206
- package/test-spot-bridge.mjs +0 -248
- package/test-wallet-setup.mjs +0 -126
package/dist/cli.js
CHANGED
@@ -9,7 +9,7 @@ import {
   updateSecureStore,
   writeConfigFile,
   writeSampleConfig
-} from "./chunk-WTECTX2Z.js";
+} from "./chunk-ZRAOPQQW.js";
 
 // src/cli.ts
 import { Command } from "commander";
@@ -79,6 +79,15 @@ function printError(message) {
   console.log(`  ${pc.red("\u2717")} ${message}`);
 }
 
+// src/llm-providers.ts
+import {
+  LLM_PROVIDERS,
+  getDefaultModel,
+  getProvider,
+  getProviderIds,
+  providerRequiresApiKey
+} from "@exagent/sdk";
+
 // src/setup.ts
 function expandHomeDir(path) {
   if (!path.startsWith("~/")) return path;
@@ -173,7 +182,6 @@ async function setupWallet(config) {
   printDone(`Wallet imported: ${pc.dim(address)}`);
   return privateKey;
 }
-var LLM_PROVIDERS = ["openai", "anthropic", "google", "deepseek", "mistral", "groq", "together", "ollama"];
 async function setupLlm(config) {
   if (isNonInteractive()) {
     const provider2 = process.env.EXAGENT_LLM_PROVIDER || config.llm?.provider;
@@ -181,12 +189,14 @@ async function setupLlm(config) {
     const apiKey2 = process.env.EXAGENT_LLM_KEY;
     if (!provider2) throw new Error("EXAGENT_LLM_PROVIDER required in non-interactive mode");
     if (!model2) throw new Error("EXAGENT_LLM_MODEL required in non-interactive mode");
-    if (!apiKey2)
+    if (providerRequiresApiKey(provider2) && !apiKey2) {
+      throw new Error("EXAGENT_LLM_KEY required in non-interactive mode");
+    }
     printDone("LLM configured");
     return { provider: provider2, model: model2, apiKey: apiKey2 };
   }
   const defaultProvider = config.llm?.provider;
-  const providerOptions = LLM_PROVIDERS.map((p) => ({ value: p, label: p }));
+  const providerOptions = LLM_PROVIDERS.map((p) => ({ value: p.id, label: p.label }));
   const selected = await clack.select({
     message: "LLM provider:",
     options: providerOptions,
@@ -195,21 +205,26 @@ async function setupLlm(config) {
   if (clack.isCancel(selected)) cancelled();
   const provider = selected;
   const defaultModel = config.llm?.model;
-  const
+  const providerInfo = getProvider(provider);
+  const modelOptions = providerInfo ? providerInfo.models.map((m) => ({ value: m.id, label: m.label })) : [{ value: defaultModel || getDefaultModel("openai"), label: defaultModel || getDefaultModel("openai") }];
+  const selectedModel = await clack.select({
     message: "LLM model:",
-
-
-    validate: (val) => {
-      if (!val.trim()) return "Model name is required.";
-    }
+    options: modelOptions,
+    initialValue: defaultModel || void 0
   });
-  if (clack.isCancel(
-  const model =
-
-
-
-
-
+  if (clack.isCancel(selectedModel)) cancelled();
+  const model = selectedModel;
+  let apiKey;
+  if (providerRequiresApiKey(provider)) {
+    const enteredApiKey = await clack.password({
+      message: "LLM API key:",
+      validate: (val) => validateLlmKeyFormat(provider, val)
+    });
+    if (clack.isCancel(enteredApiKey)) cancelled();
+    apiKey = enteredApiKey;
+  } else {
+    printInfo("Ollama uses your local server; no API key needed.");
+  }
   printDone("LLM configured");
   return { provider, model, apiKey };
 }
@@ -344,7 +359,7 @@ async function ensureLocalSetup(configPath) {
 // src/cli.ts
 import * as clack2 from "@clack/prompts";
 var program = new Command();
-program.name("exagent").description("Exagent \u2014 LLM trading agent runtime").version("0.3.
+program.name("exagent").description("Exagent \u2014 LLM trading agent runtime").version("0.3.5");
 program.command("init").description("Create a sample agent configuration file").option("--agent-id <id>", "Agent ID (from dashboard)", "my-agent").option("--api-url <url>", "API server URL", "http://localhost:3002").option("--config <path>", "Config file path", "agent-config.json").action((opts) => {
   printBanner();
   writeSampleConfig(opts.agentId, opts.apiUrl, opts.config);
@@ -420,7 +435,6 @@ program.command("status").description("Check agent status").option("--config <pa
     process.exit(1);
   }
 });
-var LLM_PROVIDERS2 = ["openai", "anthropic", "google", "deepseek", "mistral", "groq", "together", "ollama"];
 program.command("config").description("Change LLM provider, model, or API key").option("--config <path>", "Config file path", "agent-config.json").action(async (opts) => {
   try {
     printBanner();
@@ -459,7 +473,7 @@ program.command("config").description("Change LLM provider, model, or API key").
     if (action === "all") {
       const selectedProvider = await clack2.select({
         message: "LLM provider:",
-        options:
+        options: LLM_PROVIDERS.map((p) => ({ value: p.id, label: p.label })),
         initialValue: currentProvider || void 0
       });
       if (clack2.isCancel(selectedProvider)) {
@@ -467,29 +481,35 @@
         process.exit(0);
       }
       newProvider = selectedProvider;
-      const
+      const provider = getProvider(newProvider);
+      const modelOptions = provider ? provider.models.map((m) => ({ value: m.id, label: m.label })) : [{ value: currentModel, label: currentModel }];
+      const selectedModel = await clack2.select({
         message: "LLM model:",
-
-
-        if (!val?.trim()) return "Model name is required.";
-        }
+        options: modelOptions,
+        initialValue: currentModel || void 0
       });
-      if (clack2.isCancel(
+      if (clack2.isCancel(selectedModel)) {
         clack2.cancel("Cancelled.");
         process.exit(0);
       }
-      newModel =
+      newModel = selectedModel;
     }
-
-
-
-
-
+    let newKey;
+    if (providerRequiresApiKey(newProvider)) {
+      const enteredKey = await clack2.password({
+        message: "New LLM API key:",
+        validate: (val) => {
+          if (!val?.trim()) return "API key is required.";
+          if (val.length < 10) return "API key seems too short.";
+        }
+      });
+      if (clack2.isCancel(enteredKey)) {
+        clack2.cancel("Cancelled.");
+        process.exit(0);
       }
-
-
-
-      process.exit(0);
+      newKey = enteredKey;
+    } else {
+      printInfo("Ollama uses your local server; no API key needed.");
     }
     updateSecureStore(secureStorePath, password3, { llmApiKey: newKey });
     const updatedConfig = readConfigFile(opts.config);
@@ -503,7 +523,7 @@ program.command("config").description("Change LLM provider, model, or API key").
     printSuccess("Updated", [
       `${pc.dim("Provider:")} ${pc.cyan(newProvider)}`,
       `${pc.dim("Model:")} ${pc.cyan(newModel)}`,
-      `${pc.dim("API key:")} ${pc.dim(`${newKey.slice(0, 7)}...${newKey.slice(-4)}`)}`,
+      `${pc.dim("API key:")} ${newKey ? pc.dim(`${newKey.slice(0, 7)}...${newKey.slice(-4)}`) : pc.dim("not required")}`,
       "",
       `Run ${pc.cyan("npx exagent run")} to start with the new configuration.`
     ]);
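Note: the rebuilt setup flow above validates the interactive key prompt through validateLlmKeyFormat(provider, val), a helper defined elsewhere in the shared chunk and not shown in this diff. A minimal sketch of what such a clack validator could look like; the function name comes from the bundle, but every rule below is an assumption rather than the package's actual logic:

  // Hypothetical sketch only; the real validateLlmKeyFormat in the chunk may apply different rules.
  // In @clack/prompts, returning a string marks the input invalid and returning undefined accepts it.
  export function validateLlmKeyFormat(provider: string, val: string): string | undefined {
    if (!val?.trim()) return 'API key is required.';
    if (val.length < 10) return 'API key seems too short.';
    if (provider === 'openai' && !val.startsWith('sk-')) return 'OpenAI keys usually start with "sk-".';
    return undefined;
  }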
package/dist/index.js
CHANGED
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@exagent/agent",
-  "version": "0.3.4",
+  "version": "0.3.6",
   "type": "module",
   "main": "dist/index.js",
   "types": "dist/index.d.ts",
@@ -13,31 +13,31 @@
       "types": "./dist/index.d.ts"
     }
   },
-  "scripts": {
-    "build": "tsup src/index.ts src/cli.ts --format esm --dts",
-    "dev": "tsup src/index.ts src/cli.ts --format esm --dts --watch"
-  },
   "dependencies": {
     "@clack/prompts": "^1.1.0",
-    "@
-    "@polymarket/clob-client": "^4.0.0",
+    "@polymarket/clob-client": "^5.8.1",
     "boxen": "^8.0.1",
-    "commander": "^12.
-    "ethers": "^5.7.2",
+    "commander": "^12.1.0",
     "figlet": "^1.10.0",
     "gradient-string": "^3.0.0",
     "picocolors": "^1.1.1",
-    "viem": "^2.
-    "ws": "^8.
-    "zod": "^3.
+    "viem": "^2.48.11",
+    "ws": "^8.20.0",
+    "zod": "^3.25.76",
+    "@exagent/sdk": "0.2.2"
   },
   "devDependencies": {
     "@types/figlet": "^1.7.0",
     "@types/gradient-string": "^1.1.6",
-    "@types/node": "^
-    "@types/ws": "^8.
-    "tsup": "^8.
-    "tsx": "^4.
-    "typescript": "^5.
+    "@types/node": "^22.19.18",
+    "@types/ws": "^8.18.1",
+    "tsup": "^8.5.1",
+    "tsx": "^4.21.0",
+    "typescript": "^5.9.3"
+  },
+  "scripts": {
+    "build": "tsup src/index.ts src/cli.ts --format esm --dts",
+    "dev": "tsup src/index.ts src/cli.ts --format esm --dts --watch",
+    "test": "tsx --test test/**/*.test.ts"
   }
-}
+}
package/src/cli.ts
CHANGED
@@ -12,7 +12,7 @@ const program = new Command();
 program
   .name('exagent')
   .description('Exagent — LLM trading agent runtime')
-  .version('0.3.
+  .version('0.3.5');
 
 program
   .command('init')
@@ -120,7 +120,7 @@ program
   }
 });
 
-
+import { LLM_PROVIDERS, getProvider, providerRequiresApiKey } from './llm-providers.js';
 
 program
   .command('config')
@@ -170,13 +170,13 @@ program
       process.exit(0);
     }
 
-    let newProvider = currentProvider;
-    let newModel = currentModel;
+    let newProvider: string = currentProvider;
+    let newModel: string = currentModel;
 
     if (action === 'all') {
       const selectedProvider = await clack.select({
         message: 'LLM provider:',
-        options: LLM_PROVIDERS.map(p => ({ value: p, label: p })),
+        options: LLM_PROVIDERS.map(p => ({ value: p.id, label: p.label })),
         initialValue: currentProvider || undefined,
       });
       if (clack.isCancel(selectedProvider)) {
@@ -185,30 +185,39 @@ program
       }
       newProvider = selectedProvider;
 
-      const
+      const provider = getProvider(newProvider);
+      const modelOptions = provider
+        ? provider.models.map(m => ({ value: m.id, label: m.label }))
+        : [{ value: currentModel, label: currentModel }];
+
+      const selectedModel = await clack.select({
         message: 'LLM model:',
-
-
-        if (!val?.trim()) return 'Model name is required.';
-        },
+        options: modelOptions,
+        initialValue: currentModel || undefined,
       });
-      if (clack.isCancel(
+      if (clack.isCancel(selectedModel)) {
         clack.cancel('Cancelled.');
         process.exit(0);
       }
-      newModel =
+      newModel = selectedModel;
     }
 
-
-
-
-
-
-
-
-
-
+    let newKey: string | undefined;
+    if (providerRequiresApiKey(newProvider)) {
+      const enteredKey = await clack.password({
+        message: 'New LLM API key:',
+        validate: (val) => {
+          if (!val?.trim()) return 'API key is required.';
+          if (val.length < 10) return 'API key seems too short.';
+        },
+      });
+      if (clack.isCancel(enteredKey)) {
+        clack.cancel('Cancelled.');
+        process.exit(0);
+      }
+      newKey = enteredKey;
+    } else {
+      printInfo('Ollama uses your local server; no API key needed.');
     }
 
     // Update secure store with new API key
@@ -228,7 +237,7 @@ program
     printSuccess('Updated', [
       `${pc.dim('Provider:')} ${pc.cyan(newProvider)}`,
       `${pc.dim('Model:')} ${pc.cyan(newModel)}`,
-      `${pc.dim('API key:')} ${pc.dim(`${newKey.slice(0, 7)}...${newKey.slice(-4)}`)}`,
+      `${pc.dim('API key:')} ${newKey ? pc.dim(`${newKey.slice(0, 7)}...${newKey.slice(-4)}`) : pc.dim('not required')}`,
       '',
       `Run ${pc.cyan('npx exagent run')} to start with the new configuration.`,
     ]);
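Note: package/src/llm-providers.ts (+8 lines) is new in this release but its contents are not rendered in this diff. Judging from the import above and from the bundled dist/cli.js, it is presumably a thin re-export of the SDK's provider catalogue; the sketch below reflects that assumption, with the entry shape inferred from how the CLI reads p.id, p.label and provider.models:

  // Presumed content of src/llm-providers.ts: a re-export of the @exagent/sdk provider catalogue.
  // LLM_PROVIDERS is assumed to be an array of entries roughly shaped like
  //   { id: 'openai', label: 'OpenAI', models: [{ id: '...', label: '...' }, ...] }
  export {
    LLM_PROVIDERS,
    getDefaultModel,
    getProvider,
    getProviderIds,
    providerRequiresApiKey,
  } from '@exagent/sdk';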
package/src/config.ts
CHANGED
@@ -3,6 +3,7 @@ import { chmodSync, existsSync, readFileSync, writeFileSync } from 'node:fs';
 import { homedir } from 'node:os';
 import { dirname, resolve } from 'node:path';
 import { z } from 'zod';
+import { LLM_PROVIDER_IDS, providerRequiresApiKey } from '@exagent/sdk';
 import type { LLMProvider } from '@exagent/sdk';
 
 export interface RuntimeConfig {
@@ -115,7 +116,7 @@ export interface LoadConfigOptions {
   getSecretPassword?: () => Promise<string>;
 }
 
-const providerEnum = z.enum(
+const providerEnum = z.enum(LLM_PROVIDER_IDS);
 
 const runtimeSchema = z.object({
   agentId: z.string(),
@@ -379,7 +380,9 @@ export async function loadConfig(path: string = 'agent-config.json', options: Lo
     config.wallet = { privateKey: process.env.EXAGENT_WALLET_PRIVATE_KEY };
   }
 
-
+  const llmNeedsApiKey = providerRequiresApiKey(String(llm.provider || ''));
+
+  if ((!config.apiToken || (llmNeedsApiKey && !llm.apiKey) || !config.wallet) && parsed.secrets?.secureStorePath) {
     const password = process.env.EXAGENT_SECRET_PASSWORD || await options.getSecretPassword?.();
     if (!password) {
       throw new Error('Encrypted secret store found, but no password was provided.');
@@ -391,7 +394,7 @@ export async function loadConfig(path: string = 'agent-config.json', options: Lo
     if (!llm.apiKey && secrets.llmApiKey) llm.apiKey = secrets.llmApiKey;
   }
 
-  if ((!config.apiToken || !llm.apiKey || !config.wallet) && parsed.secrets?.bootstrapToken && !parsed.secrets?.secureStorePath) {
+  if ((!config.apiToken || (llmNeedsApiKey && !llm.apiKey) || !config.wallet) && parsed.secrets?.bootstrapToken && !parsed.secrets?.secureStorePath) {
     throw new Error(`Config ${path} still requires first-time secure setup. Run 'exagent setup --config ${path}' or start the agent interactively.`);
   }
 
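Note: z.enum() only accepts a non-empty tuple of string literals, so for providerEnum = z.enum(LLM_PROVIDER_IDS) to type-check the SDK presumably exports the ids as a readonly const tuple. A sketch of the assumed export, reusing the provider list the old inlined LLM_PROVIDERS array carried; providerRequiresApiKey is likewise assumed to single out Ollama, matching the "no API key needed" branch in the CLI:

  // Assumed @exagent/sdk surface; not shown in this diff.
  export const LLM_PROVIDER_IDS = [
    'openai', 'anthropic', 'google', 'deepseek', 'mistral', 'groq', 'together', 'ollama',
  ] as const;

  export type LLMProvider = (typeof LLM_PROVIDER_IDS)[number];

  export function providerRequiresApiKey(provider: string): boolean {
    return provider !== 'ollama'; // the local Ollama server is the only keyless provider here
  }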
package/src/llm/anthropic.ts
CHANGED
@@ -1,4 +1,4 @@
-import type
+import { getDefaultModel, type LLMMessage, type LLMResponse, type LLMMetadata, type LLMConfig } from '@exagent/sdk';
 import { BaseLLMAdapter } from './base.js';
 
 export class AnthropicAdapter extends BaseLLMAdapter {
@@ -14,7 +14,7 @@ export class AnthropicAdapter extends BaseLLMAdapter {
     const nonSystemMessages = messages.filter(m => m.role !== 'system');
 
     const body: Record<string, unknown> = {
-      model: this.config.model || '
+      model: this.config.model || getDefaultModel('anthropic'),
       messages: nonSystemMessages.map(m => ({ role: m.role, content: m.content })),
       max_tokens: this.getMaxTokens(),
       temperature: this.getTemperature(),
@@ -57,7 +57,7 @@ export class AnthropicAdapter extends BaseLLMAdapter {
   getMetadata(): LLMMetadata {
     return {
       provider: 'anthropic',
-      model: this.config.model || '
+      model: this.config.model || getDefaultModel('anthropic'),
     };
   }
 }
package/src/llm/deepseek.ts
CHANGED
@@ -1,4 +1,4 @@
-import type
+import { getDefaultModel, type LLMMessage, type LLMResponse, type LLMMetadata, type LLMConfig } from '@exagent/sdk';
 import { BaseLLMAdapter } from './base.js';
 
 export class DeepSeekAdapter extends BaseLLMAdapter {
@@ -14,7 +14,7 @@ export class DeepSeekAdapter extends BaseLLMAdapter {
         Authorization: `Bearer ${this.config.apiKey}`,
       },
       body: JSON.stringify({
-        model: this.config.model || 'deepseek
+        model: this.config.model || getDefaultModel('deepseek'),
         messages: messages.map(m => ({ role: m.role, content: m.content })),
         temperature: this.getTemperature(),
         max_tokens: this.getMaxTokens(),
@@ -42,7 +42,7 @@ export class DeepSeekAdapter extends BaseLLMAdapter {
   getMetadata(): LLMMetadata {
     return {
       provider: 'deepseek',
-      model: this.config.model || 'deepseek
+      model: this.config.model || getDefaultModel('deepseek'),
     };
   }
 }
package/src/llm/google.ts
CHANGED
@@ -1,4 +1,4 @@
-import type
+import { getDefaultModel, type LLMMessage, type LLMResponse, type LLMMetadata, type LLMConfig } from '@exagent/sdk';
 import { BaseLLMAdapter } from './base.js';
 
 export class GoogleAdapter extends BaseLLMAdapter {
@@ -7,7 +7,7 @@ export class GoogleAdapter extends BaseLLMAdapter {
   }
 
   protected async chatImpl(messages: LLMMessage[]): Promise<LLMResponse> {
-    const model = this.config.model || '
+    const model = this.config.model || getDefaultModel('google');
     const url = `https://generativelanguage.googleapis.com/v1beta/models/${model}:generateContent?key=${this.config.apiKey}`;
 
     const systemMessage = messages.find(m => m.role === 'system');
@@ -57,7 +57,7 @@ export class GoogleAdapter extends BaseLLMAdapter {
   getMetadata(): LLMMetadata {
     return {
       provider: 'google',
-      model: this.config.model || '
+      model: this.config.model || getDefaultModel('google'),
     };
   }
 }
package/src/llm/groq.ts
CHANGED
@@ -1,4 +1,4 @@
-import type
+import { getDefaultModel, type LLMMessage, type LLMResponse, type LLMMetadata, type LLMConfig } from '@exagent/sdk';
 import { BaseLLMAdapter } from './base.js';
 
 export class GroqAdapter extends BaseLLMAdapter {
@@ -14,7 +14,7 @@ export class GroqAdapter extends BaseLLMAdapter {
         Authorization: `Bearer ${this.config.apiKey}`,
       },
       body: JSON.stringify({
-        model: this.config.model || '
+        model: this.config.model || getDefaultModel('groq'),
         messages: messages.map(m => ({ role: m.role, content: m.content })),
         temperature: this.getTemperature(),
         max_tokens: this.getMaxTokens(),
@@ -42,7 +42,7 @@ export class GroqAdapter extends BaseLLMAdapter {
   getMetadata(): LLMMetadata {
     return {
       provider: 'groq',
-      model: this.config.model || '
+      model: this.config.model || getDefaultModel('groq'),
     };
   }
 }
package/src/llm/mistral.ts
CHANGED
@@ -1,4 +1,4 @@
-import type
+import { getDefaultModel, type LLMMessage, type LLMResponse, type LLMMetadata, type LLMConfig } from '@exagent/sdk';
 import { BaseLLMAdapter } from './base.js';
 
 export class MistralAdapter extends BaseLLMAdapter {
@@ -14,7 +14,7 @@ export class MistralAdapter extends BaseLLMAdapter {
         Authorization: `Bearer ${this.config.apiKey}`,
       },
       body: JSON.stringify({
-        model: this.config.model || 'mistral
+        model: this.config.model || getDefaultModel('mistral'),
         messages: messages.map(m => ({ role: m.role, content: m.content })),
         temperature: this.getTemperature(),
         max_tokens: this.getMaxTokens(),
@@ -42,7 +42,7 @@ export class MistralAdapter extends BaseLLMAdapter {
   getMetadata(): LLMMetadata {
     return {
       provider: 'mistral',
-      model: this.config.model || 'mistral
+      model: this.config.model || getDefaultModel('mistral'),
     };
   }
 }
package/src/llm/ollama.ts
CHANGED
@@ -1,4 +1,4 @@
-import type
+import { getDefaultModel, type LLMMessage, type LLMResponse, type LLMMetadata, type LLMConfig } from '@exagent/sdk';
 import { BaseLLMAdapter } from './base.js';
 
 export class OllamaAdapter extends BaseLLMAdapter {
@@ -14,7 +14,7 @@ export class OllamaAdapter extends BaseLLMAdapter {
       method: 'POST',
       headers: { 'Content-Type': 'application/json' },
       body: JSON.stringify({
-        model: this.config.model || '
+        model: this.config.model || getDefaultModel('ollama'),
         messages: messages.map(m => ({ role: m.role, content: m.content })),
         stream: false,
         options: {
@@ -46,7 +46,7 @@ export class OllamaAdapter extends BaseLLMAdapter {
   getMetadata(): LLMMetadata {
     return {
       provider: 'ollama',
-      model: this.config.model || '
+      model: this.config.model || getDefaultModel('ollama'),
     };
   }
 }
package/src/llm/openai.ts
CHANGED
@@ -1,4 +1,4 @@
-import type
+import { getDefaultModel, shouldUseOpenAIResponses, type LLMMessage, type LLMResponse, type LLMMetadata, type LLMConfig } from '@exagent/sdk';
 import { BaseLLMAdapter } from './base.js';
 
 export class OpenAIAdapter extends BaseLLMAdapter {
@@ -10,6 +10,11 @@ export class OpenAIAdapter extends BaseLLMAdapter {
   }
 
   protected async chatImpl(messages: LLMMessage[]): Promise<LLMResponse> {
+    const model = this.config.model || getDefaultModel('openai');
+    if (shouldUseOpenAIResponses(model)) {
+      return this.chatResponses(model, messages);
+    }
+
     const res = await this.fetchWithTimeout(`${this.endpoint}/chat/completions`, {
       method: 'POST',
       headers: {
@@ -17,7 +22,7 @@ export class OpenAIAdapter extends BaseLLMAdapter {
         Authorization: `Bearer ${this.config.apiKey}`,
       },
       body: JSON.stringify({
-        model
+        model,
         messages: messages.map(m => ({ role: m.role, content: m.content })),
         temperature: this.getTemperature(),
         max_tokens: this.getMaxTokens(),
@@ -42,10 +47,48 @@ export class OpenAIAdapter extends BaseLLMAdapter {
     };
   }
 
+  private async chatResponses(model: string, messages: LLMMessage[]): Promise<LLMResponse> {
+    const res = await this.fetchWithTimeout(`${this.endpoint}/responses`, {
+      method: 'POST',
+      headers: {
+        'Content-Type': 'application/json',
+        Authorization: `Bearer ${this.config.apiKey}`,
+      },
+      body: JSON.stringify({
+        model,
+        input: messages.map(m => ({
+          role: m.role === 'system' ? 'developer' : m.role,
+          content: m.content,
+        })),
+        max_output_tokens: this.getMaxTokens(),
+      }),
+    });
+
+    if (!res.ok) {
+      const body = await res.text();
+      throw new Error(`OpenAI API error ${res.status}: ${body}`);
+    }
+
+    const data = await res.json() as {
+      output_text?: string;
+      output?: Array<{ content?: Array<{ text?: string }> }>;
+      usage?: { input_tokens?: number; output_tokens?: number };
+    };
+
+    return {
+      content: data.output_text
+        || data.output?.flatMap(item => item.content?.map(content => content.text || '') || []).join('')
+        || '',
+      tokens: data.usage
+        ? { input: data.usage.input_tokens || 0, output: data.usage.output_tokens || 0 }
+        : undefined,
+    };
+  }
+
   getMetadata(): LLMMetadata {
     return {
       provider: 'openai',
-      model: this.config.model || '
+      model: this.config.model || getDefaultModel('openai'),
     };
   }
 }
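Note: the new branch only fires when shouldUseOpenAIResponses(model) from @exagent/sdk returns true; which model ids that covers is not visible in this diff. A rough usage sketch under that assumption; the constructor argument and the public chat() wrapper on BaseLLMAdapter are inferred from how this.config and chatImpl are used above, and the model id is only an example:

  import { OpenAIAdapter } from './llm/openai.js';

  // Config shape inferred from this.config usage in the adapter; not confirmed by this diff.
  const adapter = new OpenAIAdapter({
    provider: 'openai',
    model: 'o4-mini',
    apiKey: process.env.OPENAI_API_KEY!,
  });

  // If shouldUseOpenAIResponses('o4-mini') is true, the request goes to POST {endpoint}/responses,
  // with the system message re-labelled as role "developer" and max_output_tokens in place of
  // max_tokens; otherwise the adapter falls back to the existing /chat/completions path.
  const reply = await adapter.chat([
    { role: 'system', content: 'You are a trading agent.' },
    { role: 'user', content: 'Summarise the run configuration.' },
  ]);
  console.log(reply.content, reply.tokens);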
package/src/llm/together.ts
CHANGED
@@ -1,4 +1,4 @@
-import type
+import { getDefaultModel, type LLMMessage, type LLMResponse, type LLMMetadata, type LLMConfig } from '@exagent/sdk';
 import { BaseLLMAdapter } from './base.js';
 
 export class TogetherAdapter extends BaseLLMAdapter {
@@ -14,7 +14,7 @@ export class TogetherAdapter extends BaseLLMAdapter {
         Authorization: `Bearer ${this.config.apiKey}`,
       },
       body: JSON.stringify({
-        model: this.config.model || '
+        model: this.config.model || getDefaultModel('together'),
         messages: messages.map(m => ({ role: m.role, content: m.content })),
         temperature: this.getTemperature(),
         max_tokens: this.getMaxTokens(),
@@ -42,7 +42,7 @@ export class TogetherAdapter extends BaseLLMAdapter {
   getMetadata(): LLMMetadata {
     return {
       provider: 'together',
-      model: this.config.model || '
+      model: this.config.model || getDefaultModel('together'),
     };
   }
 }