@involvex/super-agent-cli 0.0.46 → 0.0.48

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -184,15 +184,15 @@ function getSettingsManager() {
  var SETTINGS_VERSION = 2, DEFAULT_USER_SETTINGS, DEFAULT_PROJECT_SETTINGS;
  var init_settings_manager = __esm(() => {
    DEFAULT_USER_SETTINGS = {
-     active_provider: "zai",
+     active_provider: "grok",
      providers: {
-       zai: {
-         id: "zai",
-         provider: "zai",
-         model: "glm-4.7",
+       grok: {
+         id: "grok",
+         provider: "grok",
+         model: "grok-code-fast-1",
          api_key: "",
-         base_url: "https://api.z.ai/api/paas/v4",
-         default_model: "glm-4.7"
+         base_url: "https://api.x.ai/v1",
+         default_model: "grok-code-fast-1"
        },
        openai: {
          id: "openai",
@@ -205,10 +205,66 @@ var init_settings_manager = __esm(() => {
        gemini: {
          id: "gemini",
          provider: "gemini",
-         model: "gemini-3-flash-preview",
+         model: "gemini-2.0-flash",
          api_key: "",
          base_url: "",
-         default_model: "gemini-3-flash-preview"
+         default_model: "gemini-2.0-flash"
+       },
+       mistral: {
+         id: "mistral",
+         provider: "mistral",
+         model: "mistral-large-latest",
+         api_key: "",
+         base_url: "https://api.mistral.ai/v1",
+         default_model: "mistral-large-latest"
+       },
+       openrouter: {
+         id: "openrouter",
+         provider: "openrouter",
+         model: "anthropic/claude-3.5-sonnet",
+         api_key: "",
+         base_url: "https://openrouter.ai/api/v1",
+         default_model: "anthropic/claude-3.5-sonnet"
+       },
+       minimax: {
+         id: "minimax",
+         provider: "minimax",
+         model: "abab6.5s-chat",
+         api_key: "",
+         base_url: "https://api.minimax.chat/v1",
+         default_model: "abab6.5s-chat"
+       },
+       groq: {
+         id: "groq",
+         provider: "groq",
+         model: "llama-3.3-70b-versatile",
+         api_key: "",
+         base_url: "https://api.groq.com/openai/v1",
+         default_model: "llama-3.3-70b-versatile"
+       },
+       deepseek: {
+         id: "deepseek",
+         provider: "deepseek",
+         model: "deepseek-coder",
+         api_key: "",
+         base_url: "https://api.deepseek.com/v1",
+         default_model: "deepseek-coder"
+       },
+       ollama: {
+         id: "ollama",
+         provider: "ollama",
+         model: "llama3",
+         api_key: "ollama",
+         base_url: "http://localhost:11434/v1",
+         default_model: "llama3"
+       },
+       "workers-ai": {
+         id: "workers-ai",
+         provider: "workers-ai",
+         model: "@cf/meta/llama-3.1-70b-instruct",
+         api_key: "",
+         base_url: "https://api.cloudflare.com/client/v4/accounts/{ACCOUNT_ID}/ai/v1",
+         default_model: "@cf/meta/llama-3.1-70b-instruct"
        }
      },
      ui: {
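
Every entry under providers now follows the same six-field shape. A minimal TypeScript sketch of that shape, for reference (the interface name ProviderSettings is an assumption; the bundled output above carries no type declarations):

interface ProviderSettings {
  id: string;             // key under providers, e.g. "groq" or "workers-ai"
  provider: string;       // provider type used to choose a client implementation
  model: string;          // currently selected model
  api_key: string;        // empty by default; Ollama uses the placeholder "ollama"
  base_url: string;       // OpenAI-compatible endpoint, or "" where the client supplies its own default
  default_model: string;  // fallback model for this provider
}
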
@@ -5197,6 +5253,78 @@ async function getAllSuperAgentTools() {
    return addMCPToolsToSuperAgentTools(SUPER_AGENT_TOOLS);
  }

+ // src/core/providers/openai-compatible.ts
+ import OpenAI from "openai";
+
+ class OpenAICompatibleProvider {
+   name;
+   client;
+   currentModel;
+   defaultMaxTokens;
+   constructor(apiKey, baseURL, model, name = "openai-compatible") {
+     this.name = name;
+     this.client = new OpenAI({
+       apiKey: apiKey || "dummy-key",
+       baseURL,
+       timeout: 360000
+     });
+     this.currentModel = model;
+     const envMax = Number(process.env.SUPER_AGENT_MAX_TOKENS);
+     this.defaultMaxTokens = Number.isFinite(envMax) && envMax > 0 ? envMax : 4096;
+   }
+   setModel(model) {
+     this.currentModel = model;
+   }
+   getCurrentModel() {
+     return this.currentModel;
+   }
+   async chat(messages, options) {
+     try {
+       const model = options?.model || this.currentModel;
+       const tools = options?.tools || [];
+       const payload = {
+         model,
+         messages,
+         tools: tools.length > 0 ? tools : undefined,
+         tool_choice: tools.length > 0 ? "auto" : undefined,
+         temperature: 0.7,
+         max_tokens: this.defaultMaxTokens
+       };
+       if (options?.search_parameters) {
+         payload.search_parameters = options.search_parameters;
+       }
+       const response = await this.client.chat.completions.create(payload);
+       return response;
+     } catch (error) {
+       throw new Error(`${this.name} API error: ${error.message}`);
+     }
+   }
+   async* chatStream(messages, options) {
+     try {
+       const model = options?.model || this.currentModel;
+       const tools = options?.tools || [];
+       const payload = {
+         model,
+         messages,
+         tools: tools.length > 0 ? tools : undefined,
+         tool_choice: tools.length > 0 ? "auto" : undefined,
+         temperature: 0.7,
+         max_tokens: this.defaultMaxTokens,
+         stream: true
+       };
+       if (options?.search_parameters) {
+         payload.search_parameters = options.search_parameters;
+       }
+       const stream = await this.client.chat.completions.create(payload);
+       for await (const chunk of stream) {
+         yield chunk;
+       }
+     } catch (error) {
+       throw new Error(`${this.name} API error: ${error.message}`);
+     }
+   }
+ }
+
  // src/utils/custom-instructions.ts
  import * as path7 from "path";
  import * as os2 from "os";
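
A minimal usage sketch of the new class, assuming it is exported from src/core/providers/openai-compatible.ts in the source tree (the bundled output above keeps it module-private); the endpoint and model mirror the groq defaults added earlier:

import { OpenAICompatibleProvider } from "./core/providers/openai-compatible";

const provider = new OpenAICompatibleProvider(
  process.env.GROQ_API_KEY ?? "",      // empty keys fall back to "dummy-key" internally
  "https://api.groq.com/openai/v1",
  "llama-3.3-70b-versatile",
  "groq"                               // label used in error messages
);

// Non-streaming: a single OpenAI-style chat completion response.
const reply = await provider.chat([{ role: "user", content: "Hello" }]);
console.log(reply.choices[0].message.content);

// Streaming: chatStream yields raw completion chunks.
for await (const chunk of provider.chatStream([{ role: "user", content: "Hello" }])) {
  process.stdout.write(chunk.choices[0]?.delta?.content ?? "");
}
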
@@ -5224,7 +5352,7 @@ function loadCustomInstructions(workingDirectory = process.cwd()) {
  init_settings_manager();

  // src/core/providers/openai.ts
- import OpenAI from "openai";
+ import OpenAI2 from "openai";

  class OpenAIProvider {
    name = "openai";
@@ -5232,7 +5360,7 @@ class OpenAIProvider {
    currentModel;
    defaultMaxTokens;
    constructor(apiKey, baseURL, headerModel) {
-     this.client = new OpenAI({
+     this.client = new OpenAI2({
        apiKey,
        baseURL: baseURL || "https://api.openai.com/v1",
        timeout: 360000
@@ -5452,7 +5580,7 @@ class GeminiProvider {
  }

  // src/core/providers/grok.ts
- import OpenAI2 from "openai";
+ import OpenAI3 from "openai";

  class GrokProvider {
    name = "grok";
@@ -5460,7 +5588,7 @@ class GrokProvider {
    currentModel;
    defaultMaxTokens;
    constructor(apiKey, baseURL, headerModel) {
-     this.client = new OpenAI2({
+     this.client = new OpenAI3({
        apiKey,
        baseURL: baseURL || "https://api.x.ai/v1",
        timeout: 360000
@@ -5545,19 +5673,24 @@ class SuperAgent extends EventEmitter4 {
      super();
      const manager = getSettingsManager();
      const settings = manager.loadUserSettings();
-     const savedModel = manager.getCurrentModel();
-     let activeProvider = (settings.active_provider || "grok").toLowerCase();
-     if (activeProvider === "zai") {
-       activeProvider = "grok";
-     }
-     const modelToUse = model || savedModel || "grok-code-fast-1";
+     let activeProviderId = (settings.active_provider || "grok").toLowerCase();
+     if (activeProviderId === "zai") {
+       activeProviderId = "grok";
+     }
+     const providerConfig = settings.providers[activeProviderId];
+     const providerType = providerConfig?.provider || activeProviderId;
+     const effectiveApiKey = apiKey || providerConfig?.api_key || "";
+     const effectiveBaseURL = baseURL || (providerConfig?.base_url ? providerConfig.base_url : undefined);
+     const effectiveModel = model || providerConfig?.model || providerConfig?.default_model || "grok-code-fast-1";
      this.maxToolRounds = maxToolRounds || 400;
-     if (activeProvider === "openai") {
-       this.superAgentClient = new OpenAIProvider(apiKey, baseURL, modelToUse);
-     } else if (activeProvider === "gemini" || activeProvider === "google") {
-       this.superAgentClient = new GeminiProvider(apiKey, baseURL, modelToUse);
+     if (providerType === "openai") {
+       this.superAgentClient = new OpenAIProvider(effectiveApiKey, effectiveBaseURL, effectiveModel);
+     } else if (providerType === "gemini" || providerType === "google") {
+       this.superAgentClient = new GeminiProvider(effectiveApiKey, effectiveBaseURL, effectiveModel);
+     } else if (providerType === "grok") {
+       this.superAgentClient = new GrokProvider(effectiveApiKey, effectiveBaseURL, effectiveModel);
      } else {
-       this.superAgentClient = new GrokProvider(apiKey, baseURL, modelToUse);
+       this.superAgentClient = new OpenAICompatibleProvider(effectiveApiKey, effectiveBaseURL || "", effectiveModel, activeProviderId);
      }
      this.textEditor = new TextEditorTool;
      this.morphEditor = process.env.MORPH_API_KEY ? new MorphEditorTool : null;
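
In effect, any active_provider that is not openai, gemini/google, or grok now resolves through its settings entry and lands on the generic OpenAI-compatible client. A standalone sketch of that resolution order (the function name and return shape are illustrative; in the package this logic is inlined in the constructor above):

function resolveProvider(settings: any, apiKey?: string, baseURL?: string, model?: string) {
  let id = (settings.active_provider || "grok").toLowerCase();
  if (id === "zai") id = "grok";                       // legacy zai configs migrate to grok
  const cfg = settings.providers[id];
  return {
    id,
    type: cfg?.provider || id,                         // picks the client class
    apiKey: apiKey || cfg?.api_key || "",              // explicit argument wins
    baseURL: baseURL || cfg?.base_url || undefined,    // then the settings entry
    model: model || cfg?.model || cfg?.default_model || "grok-code-fast-1",
  };
}
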
@@ -5566,7 +5699,7 @@ class SuperAgent extends EventEmitter4 {
      this.confirmationTool = new ConfirmationTool;
      this.search = new SearchTool;
      this.projectMap = new ProjectMapTool;
-     this.tokenCounter = createTokenCounter(modelToUse);
+     this.tokenCounter = createTokenCounter(effectiveModel);
      this.initializeMCP();
      const customInstructions = loadCustomInstructions();
      const customInstructionsSection = customInstructions ? `
@@ -5652,7 +5785,7 @@ Current working directory: ${process.cwd()}`
      });
    }
    isGrokModel() {
-     return this.superAgentClient.name === "grok";
+     return this.superAgentClient.name === "grok" || this.superAgentClient.name.includes("grok");
    }
    shouldUseSearchFor(message) {
      const q = message.toLowerCase();
@@ -6069,6 +6202,22 @@ Maximum tool execution rounds reached. Stopping to prevent infinite loops.`
        };
      }
    }
+   abortCurrentOperation() {
+     if (this.abortController) {
+       this.abortController.abort();
+       this.abortController = null;
+     }
+   }
+   setModel(model) {
+     this.superAgentClient.setModel(model);
+     this.tokenCounter = createTokenCounter(model);
+   }
+   getCurrentModel() {
+     return this.superAgentClient.getCurrentModel();
+   }
+   async executeBashCommand(command) {
+     return await this.bash.execute(command);
+   }
    async executeMCPTool(toolCall) {
      try {
        const args = JSON.parse(toolCall.function.arguments);
@@ -6091,7 +6240,7 @@ Maximum tool execution rounds reached. Stopping to prevent infinite loops.`
        `);
        return {
          success: true,
-         output: output || "Success"
+         output
        };
      } catch (error) {
        return {
@@ -6100,28 +6249,6 @@ Maximum tool execution rounds reached. Stopping to prevent infinite loops.`
        };
      }
    }
-   getChatHistory() {
-     return [...this.chatHistory];
-   }
-   getCurrentDirectory() {
-     return this.bash.getCurrentDirectory();
-   }
-   async executeBashCommand(command) {
-     return await this.bash.execute(command);
-   }
-   getCurrentModel() {
-     return this.superAgentClient.getCurrentModel();
-   }
-   setModel(model) {
-     this.superAgentClient.setModel(model);
-     this.tokenCounter.dispose();
-     this.tokenCounter = createTokenCounter(model);
-   }
-   abortCurrentOperation() {
-     if (this.abortController) {
-       this.abortController.abort();
-     }
-   }
  }

  // src/ui/components/api-key-input.tsx
@@ -7084,7 +7211,7 @@ import { program } from "commander";
  // package.json
  var package_default = {
    name: "@involvex/super-agent-cli",
-   version: "0.0.46",
+   version: "0.0.48",
    description: "An open-source AI agent that brings the power of Super Agent directly into your terminal.",
    keywords: [
      "cli",
Binary file
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "@involvex/super-agent-cli",
-   "version": "0.0.46",
+   "version": "0.0.48",
    "description": "An open-source AI agent that brings the power of Super Agent directly into your terminal.",
    "keywords": [
      "cli",