@involvex/super-agent-cli 0.0.47 → 0.0.49
This diff covers publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the changes between those versions as they appear in the public registry.
- package/dist/index.js +280 -71
- package/dist/super-agent-cli.exe +0 -0
- package/package.json +1 -1
package/dist/index.js
CHANGED
@@ -146,21 +146,24 @@ class SettingsManager {
       this.saveUserSettings(settings);
     }
   }
-  getAvailableModels() {
+  getAvailableModels(providerId) {
+    const activeProvider = providerId || this.getActiveProviderConfig()?.id || "grok";
+    let models = PROVIDER_MODELS[activeProvider];
+    if (!models) {
+      const config = this.getEffectiveSettings().providers[activeProvider];
+      if (config && PROVIDER_MODELS[config.provider]) {
+        models = PROVIDER_MODELS[config.provider];
+      }
+    }
+    if (models) {
+      return models;
+    }
     return [
       "grok-beta",
       "grok-vision-beta",
       "grok-2-vision-1212",
       "grok-2-1212",
-      "grok-code-fast-1",
-      "gpt-4o",
-      "gpt-4o-mini",
-      "o1-preview",
-      "o1-mini",
-      "gemini-3-pro-preview",
-      "gemini-2.5-pro",
-      "gemini-2.5-flash",
-      "GLM-4.7"
+      "grok-code-fast-1"
     ];
   }
   getApiKey() {
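The reworked getAvailableModels() takes an optional providerId and resolves the list in three steps: a direct hit in the PROVIDER_MODELS table introduced below, then a lookup through the configured entry's provider field, then the built-in Grok list. A minimal illustrative sketch (the calls and the "my-proxy" entry are hypothetical, not taken from the package):

// Illustrative only: the three-step resolution order of getAvailableModels().
const manager = getSettingsManager();
manager.getAvailableModels();            // active provider's list (falls back to "grok")
manager.getAvailableModels("deepseek");  // direct hit in PROVIDER_MODELS
manager.getAvailableModels("my-proxy");  // hypothetical custom entry: resolved via its
                                         // `provider` field, else the built-in Grok list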
@@ -181,18 +184,58 @@ class SettingsManager {
 function getSettingsManager() {
   return SettingsManager.getInstance();
 }
-var SETTINGS_VERSION = 2, DEFAULT_USER_SETTINGS, DEFAULT_PROJECT_SETTINGS;
+var SETTINGS_VERSION = 2, PROVIDER_MODELS, DEFAULT_USER_SETTINGS, DEFAULT_PROJECT_SETTINGS;
 var init_settings_manager = __esm(() => {
+  PROVIDER_MODELS = {
+    grok: [
+      "grok-beta",
+      "grok-vision-beta",
+      "grok-2-vision-1212",
+      "grok-2-1212",
+      "grok-code-fast-1"
+    ],
+    openai: [
+      "gpt-4o",
+      "gpt-4o-mini",
+      "o1-preview",
+      "o1-mini",
+      "gpt-4-turbo",
+      "gpt-3.5-turbo"
+    ],
+    gemini: [
+      "gemini-2.0-flash",
+      "gemini-2.0-pro-exp-02-05",
+      "gemini-1.5-pro",
+      "gemini-1.5-flash"
+    ],
+    mistral: ["mistral-large-latest", "mistral-small-latest", "codestral-latest"],
+    openrouter: [
+      "anthropic/claude-3.5-sonnet",
+      "anthropic/claude-3-opus",
+      "meta-llama/llama-3.1-70b-instruct",
+      "mistralai/mistral-large",
+      "google/gemini-flash-1.5"
+    ],
+    minimax: ["abab6.5s-chat"],
+    groq: [
+      "llama-3.3-70b-versatile",
+      "llama-3.1-8b-instant",
+      "mixtral-8x7b-32768"
+    ],
+    deepseek: ["deepseek-chat", "deepseek-coder"],
+    ollama: ["llama3", "mistral", "codellama"],
+    "workers-ai": ["@cf/meta/llama-3.1-70b-instruct"]
+  };
   DEFAULT_USER_SETTINGS = {
-    active_provider: "
+    active_provider: "grok",
     providers: {
-
-      id: "
-      provider: "
-      model: "
+      grok: {
+        id: "grok",
+        provider: "grok",
+        model: "grok-code-fast-1",
         api_key: "",
-      base_url: "https://api.
-      default_model: "
+        base_url: "https://api.x.ai/v1",
+        default_model: "grok-code-fast-1"
       },
       openai: {
         id: "openai",
@@ -205,10 +248,66 @@ var init_settings_manager = __esm(() => {
       gemini: {
         id: "gemini",
         provider: "gemini",
-        model: "gemini-
+        model: "gemini-2.0-flash",
         api_key: "",
         base_url: "",
-        default_model: "gemini-
+        default_model: "gemini-2.0-flash"
+      },
+      mistral: {
+        id: "mistral",
+        provider: "mistral",
+        model: "mistral-large-latest",
+        api_key: "",
+        base_url: "https://api.mistral.ai/v1",
+        default_model: "mistral-large-latest"
+      },
+      openrouter: {
+        id: "openrouter",
+        provider: "openrouter",
+        model: "anthropic/claude-3.5-sonnet",
+        api_key: "",
+        base_url: "https://openrouter.ai/api/v1",
+        default_model: "anthropic/claude-3.5-sonnet"
+      },
+      minimax: {
+        id: "minimax",
+        provider: "minimax",
+        model: "abab6.5s-chat",
+        api_key: "",
+        base_url: "https://api.minimax.chat/v1",
+        default_model: "abab6.5s-chat"
+      },
+      groq: {
+        id: "groq",
+        provider: "groq",
+        model: "llama-3.3-70b-versatile",
+        api_key: "",
+        base_url: "https://api.groq.com/openai/v1",
+        default_model: "llama-3.3-70b-versatile"
+      },
+      deepseek: {
+        id: "deepseek",
+        provider: "deepseek",
+        model: "deepseek-coder",
+        api_key: "",
+        base_url: "https://api.deepseek.com/v1",
+        default_model: "deepseek-coder"
+      },
+      ollama: {
+        id: "ollama",
+        provider: "ollama",
+        model: "llama3",
+        api_key: "ollama",
+        base_url: "http://localhost:11434/v1",
+        default_model: "llama3"
+      },
+      "workers-ai": {
+        id: "workers-ai",
+        provider: "workers-ai",
+        model: "@cf/meta/llama-3.1-70b-instruct",
+        api_key: "",
+        base_url: "https://api.cloudflare.com/client/v4/accounts/{ACCOUNT_ID}/ai/v1",
+        default_model: "@cf/meta/llama-3.1-70b-instruct"
       }
     },
     ui: {
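With these defaults bundled, enabling one of the new providers is mostly a matter of filling in its api_key and switching active_provider. A rough sketch assuming the settings-manager calls visible in this diff (loadUserSettings, saveUserSettings, updateUserSetting) compose this way; this is not documented usage:

// Assumption: persist a key for the bundled deepseek entry, then make it active.
const manager = getSettingsManager();
const settings = manager.loadUserSettings();
settings.providers.deepseek.api_key = process.env.DEEPSEEK_API_KEY || "";
manager.saveUserSettings(settings);
manager.updateUserSetting("active_provider", "deepseek");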
@@ -1232,9 +1331,12 @@ function useInputHandler({
     { command: "/commit-and-push", description: "AI commit & push to remote" },
     { command: "/exit", description: "Exit the application" }
   ];
+  const [activeProvider, setActiveProvider] = useState3(() => {
+    return getSettingsManager().loadUserSettings().active_provider;
+  });
   const availableModels = useMemo2(() => {
     return loadModelConfig();
-  }, []);
+  }, [activeProvider]);
   const handleDirectCommand = async (input2) => {
     const trimmedInput = input2.trim();
     if (trimmedInput === "/clear") {
@@ -1305,15 +1407,27 @@ Config Commands:
       const settings = manager.loadUserSettings();
       if (settings.providers && settings.providers[providerId]) {
         manager.updateUserSetting("active_provider", providerId);
-
-
-
-
-
-
-
-
-
+        try {
+          agent.setProvider(providerId);
+          setActiveProvider(providerId);
+          setChatHistory((prev) => [
+            ...prev,
+            {
+              type: "assistant",
+              content: `✓ Switched active provider to: ${providerId}`,
+              timestamp: new Date
+            }
+          ]);
+        } catch (error) {
+          setChatHistory((prev) => [
+            ...prev,
+            {
+              type: "assistant",
+              content: `❌ Failed to switch provider: ${error.message}`,
+              timestamp: new Date
+            }
+          ]);
+        }
       } else {
         setChatHistory((prev) => [
           ...prev,
@@ -5197,6 +5311,78 @@ async function getAllSuperAgentTools() {
   return addMCPToolsToSuperAgentTools(SUPER_AGENT_TOOLS);
 }
 
+// src/core/providers/openai-compatible.ts
+import OpenAI from "openai";
+
+class OpenAICompatibleProvider {
+  name;
+  client;
+  currentModel;
+  defaultMaxTokens;
+  constructor(apiKey, baseURL, model, name = "openai-compatible") {
+    this.name = name;
+    this.client = new OpenAI({
+      apiKey: apiKey || "dummy-key",
+      baseURL,
+      timeout: 360000
+    });
+    this.currentModel = model;
+    const envMax = Number(process.env.SUPER_AGENT_MAX_TOKENS);
+    this.defaultMaxTokens = Number.isFinite(envMax) && envMax > 0 ? envMax : 4096;
+  }
+  setModel(model) {
+    this.currentModel = model;
+  }
+  getCurrentModel() {
+    return this.currentModel;
+  }
+  async chat(messages, options) {
+    try {
+      const model = options?.model || this.currentModel;
+      const tools = options?.tools || [];
+      const payload = {
+        model,
+        messages,
+        tools: tools.length > 0 ? tools : undefined,
+        tool_choice: tools.length > 0 ? "auto" : undefined,
+        temperature: 0.7,
+        max_tokens: this.defaultMaxTokens
+      };
+      if (options?.search_parameters) {
+        payload.search_parameters = options.search_parameters;
+      }
+      const response = await this.client.chat.completions.create(payload);
+      return response;
+    } catch (error) {
+      throw new Error(`${this.name} API error: ${error.message}`);
+    }
+  }
+  async* chatStream(messages, options) {
+    try {
+      const model = options?.model || this.currentModel;
+      const tools = options?.tools || [];
+      const payload = {
+        model,
+        messages,
+        tools: tools.length > 0 ? tools : undefined,
+        tool_choice: tools.length > 0 ? "auto" : undefined,
+        temperature: 0.7,
+        max_tokens: this.defaultMaxTokens,
+        stream: true
+      };
+      if (options?.search_parameters) {
+        payload.search_parameters = options.search_parameters;
+      }
+      const stream = await this.client.chat.completions.create(payload);
+      for await (const chunk of stream) {
+        yield chunk;
+      }
+    } catch (error) {
+      throw new Error(`${this.name} API error: ${error.message}`);
+    }
+  }
+}
+
 // src/utils/custom-instructions.ts
 import * as path7 from "path";
 import * as os2 from "os";
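OpenAICompatibleProvider is the generic fallback used for any provider that is not OpenAI, Gemini, or Grok; it only assumes an OpenAI-style chat-completions endpoint. A hypothetical standalone wiring against the Ollama defaults added elsewhere in this release (illustrative, not code from the package):

// Hypothetical usage, inside an async function; mirrors the bundled ollama defaults.
const ollama = new OpenAICompatibleProvider("ollama", "http://localhost:11434/v1", "llama3", "ollama");
const response = await ollama.chat([{ role: "user", content: "Say hello" }]);
console.log(response.choices[0].message.content);
// Streaming goes through the async generator:
for await (const chunk of ollama.chatStream([{ role: "user", content: "Stream a haiku" }])) {
  process.stdout.write(chunk.choices[0]?.delta?.content || "");
}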
@@ -5224,7 +5410,7 @@ function loadCustomInstructions(workingDirectory = process.cwd()) {
 init_settings_manager();
 
 // src/core/providers/openai.ts
-import
+import OpenAI2 from "openai";
 
 class OpenAIProvider {
   name = "openai";
@@ -5232,7 +5418,7 @@ class OpenAIProvider {
   currentModel;
   defaultMaxTokens;
   constructor(apiKey, baseURL, headerModel) {
-    this.client = new
+    this.client = new OpenAI2({
       apiKey,
       baseURL: baseURL || "https://api.openai.com/v1",
       timeout: 360000
@@ -5452,7 +5638,7 @@ class GeminiProvider {
 }
 
 // src/core/providers/grok.ts
-import
+import OpenAI3 from "openai";
 
 class GrokProvider {
   name = "grok";
@@ -5460,7 +5646,7 @@ class GrokProvider {
   currentModel;
   defaultMaxTokens;
   constructor(apiKey, baseURL, headerModel) {
-    this.client = new
+    this.client = new OpenAI3({
       apiKey,
       baseURL: baseURL || "https://api.x.ai/v1",
       timeout: 360000
@@ -5545,19 +5731,24 @@ class SuperAgent extends EventEmitter4 {
     super();
     const manager = getSettingsManager();
     const settings = manager.loadUserSettings();
-
-
-
-
-
-    const
+    let activeProviderId = (settings.active_provider || "grok").toLowerCase();
+    if (activeProviderId === "zai") {
+      activeProviderId = "grok";
+    }
+    const providerConfig = settings.providers[activeProviderId];
+    const providerType = providerConfig?.provider || activeProviderId;
+    const effectiveApiKey = apiKey || providerConfig?.api_key || "";
+    const effectiveBaseURL = baseURL || (providerConfig?.base_url ? providerConfig.base_url : undefined);
+    const effectiveModel = model || providerConfig?.model || providerConfig?.default_model || "grok-code-fast-1";
     this.maxToolRounds = maxToolRounds || 400;
-    if (
-      this.superAgentClient = new OpenAIProvider(
-    } else if (
-      this.superAgentClient = new GeminiProvider(
+    if (providerType === "openai") {
+      this.superAgentClient = new OpenAIProvider(effectiveApiKey, effectiveBaseURL, effectiveModel);
+    } else if (providerType === "gemini" || providerType === "google") {
+      this.superAgentClient = new GeminiProvider(effectiveApiKey, effectiveBaseURL, effectiveModel);
+    } else if (providerType === "grok") {
+      this.superAgentClient = new GrokProvider(effectiveApiKey, effectiveBaseURL, effectiveModel);
     } else {
-      this.superAgentClient = new
+      this.superAgentClient = new OpenAICompatibleProvider(effectiveApiKey, effectiveBaseURL || "", effectiveModel, activeProviderId);
     }
     this.textEditor = new TextEditorTool;
     this.morphEditor = process.env.MORPH_API_KEY ? new MorphEditorTool : null;
@@ -5566,7 +5757,7 @@ class SuperAgent extends EventEmitter4 {
     this.confirmationTool = new ConfirmationTool;
     this.search = new SearchTool;
     this.projectMap = new ProjectMapTool;
-    this.tokenCounter = createTokenCounter(
+    this.tokenCounter = createTokenCounter(effectiveModel);
     this.initializeMCP();
     const customInstructions = loadCustomInstructions();
     const customInstructionsSection = customInstructions ? `
@@ -5637,6 +5828,30 @@ IMPORTANT RESPONSE GUIDELINES:
 Current working directory: ${process.cwd()}`
     });
   }
+  setProvider(providerId) {
+    const manager = getSettingsManager();
+    const settings = manager.loadUserSettings();
+    const activeProviderId = (providerId || "grok").toLowerCase();
+    if (activeProviderId === "zai") {}
+    const providerConfig = settings.providers[activeProviderId];
+    if (!providerConfig) {
+      throw new Error(`Provider '${activeProviderId}' not configured.`);
+    }
+    const providerType = providerConfig.provider || activeProviderId;
+    const effectiveApiKey = providerConfig.api_key || "";
+    const effectiveBaseURL = providerConfig.base_url || undefined;
+    const effectiveModel = providerConfig.model || providerConfig.default_model || "grok-code-fast-1";
+    if (providerType === "openai") {
+      this.superAgentClient = new OpenAIProvider(effectiveApiKey, effectiveBaseURL, effectiveModel);
+    } else if (providerType === "gemini" || providerType === "google") {
+      this.superAgentClient = new GeminiProvider(effectiveApiKey, effectiveBaseURL, effectiveModel);
+    } else if (providerType === "grok") {
+      this.superAgentClient = new GrokProvider(effectiveApiKey, effectiveBaseURL, effectiveModel);
+    } else {
+      this.superAgentClient = new OpenAICompatibleProvider(effectiveApiKey, effectiveBaseURL || "", effectiveModel, activeProviderId);
+    }
+    this.tokenCounter = createTokenCounter(effectiveModel);
+  }
   async initializeMCP() {
     Promise.resolve().then(async () => {
       try {
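setProvider() rebuilds the underlying client and token counter from the stored provider config, which is what lets the /provider command above switch backends without restarting the CLI. An illustrative sketch against an existing agent instance (calls only, not from the package):

// Illustrative: switch a running agent to the bundled groq entry.
agent.setProvider("groq");
console.log(agent.getCurrentModel()); // "llama-3.3-70b-versatile" with the default config
agent.setModel("llama-3.1-8b-instant"); // setModel also refreshes the token counter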
@@ -5652,7 +5867,7 @@ Current working directory: ${process.cwd()}`
     });
   }
   isGrokModel() {
-    return this.superAgentClient.name === "grok";
+    return this.superAgentClient.name === "grok" || this.superAgentClient.name.includes("grok");
   }
   shouldUseSearchFor(message) {
     const q = message.toLowerCase();
@@ -6069,6 +6284,22 @@ Maximum tool execution rounds reached. Stopping to prevent infinite loops.`
       };
     }
   }
+  abortCurrentOperation() {
+    if (this.abortController) {
+      this.abortController.abort();
+      this.abortController = null;
+    }
+  }
+  setModel(model) {
+    this.superAgentClient.setModel(model);
+    this.tokenCounter = createTokenCounter(model);
+  }
+  getCurrentModel() {
+    return this.superAgentClient.getCurrentModel();
+  }
+  async executeBashCommand(command) {
+    return await this.bash.execute(command);
+  }
   async executeMCPTool(toolCall) {
     try {
       const args = JSON.parse(toolCall.function.arguments);
@@ -6091,7 +6322,7 @@ Maximum tool execution rounds reached. Stopping to prevent infinite loops.`
 `);
       return {
         success: true,
-        output
+        output
       };
     } catch (error) {
       return {
@@ -6100,28 +6331,6 @@ Maximum tool execution rounds reached. Stopping to prevent infinite loops.`
       };
     }
   }
-  getChatHistory() {
-    return [...this.chatHistory];
-  }
-  getCurrentDirectory() {
-    return this.bash.getCurrentDirectory();
-  }
-  async executeBashCommand(command) {
-    return await this.bash.execute(command);
-  }
-  getCurrentModel() {
-    return this.superAgentClient.getCurrentModel();
-  }
-  setModel(model) {
-    this.superAgentClient.setModel(model);
-    this.tokenCounter.dispose();
-    this.tokenCounter = createTokenCounter(model);
-  }
-  abortCurrentOperation() {
-    if (this.abortController) {
-      this.abortController.abort();
-    }
-  }
 }
 
 // src/ui/components/api-key-input.tsx
@@ -7084,7 +7293,7 @@ import { program } from "commander";
 // package.json
 var package_default = {
   name: "@involvex/super-agent-cli",
-  version: "0.0.
+  version: "0.0.49",
   description: "An open-source AI agent that brings the power of Super Agent directly into your terminal.",
   keywords: [
     "cli",
package/dist/super-agent-cli.exe
CHANGED
Binary file